1 """Python wrappers around TensorFlow ops.
   2 
   3 This file is MACHINE GENERATED! Do not edit.
   4 Original C++ source file: dataset_ops.cc
   5 """
   6 
   7 import collections as _collections
   8 import six as _six
   9 
  10 from tensorflow.python import pywrap_tensorflow as _pywrap_tensorflow
  11 from tensorflow.python.eager import context as _context
  12 from tensorflow.python.eager import core as _core
  13 from tensorflow.python.eager import execute as _execute
  14 from tensorflow.python.framework import dtypes as _dtypes
  15 from tensorflow.python.framework import errors as _errors
  16 from tensorflow.python.framework import tensor_shape as _tensor_shape
  17 
  18 from tensorflow.core.framework import op_def_pb2 as _op_def_pb2
  19 # Needed to trigger the call to _set_call_cpp_shape_fn.
  20 from tensorflow.python.framework import common_shapes as _common_shapes
  21 from tensorflow.python.framework import op_def_registry as _op_def_registry
  22 from tensorflow.python.framework import ops as _ops
  23 from tensorflow.python.framework import op_def_library as _op_def_library
  24 from tensorflow.python.util.deprecation import deprecated_endpoints
  25 from tensorflow.python.util.tf_export import tf_export
  26 
  27 
def anonymous_iterator(output_types, output_shapes, name=None):
  r"""A container for an iterator resource.

  Args:
    output_types: A list of `tf.DTypes` that has length `>= 1`.
    output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `resource`.
  """
  _ctx = _context._context
  if _ctx is None or not _ctx._eager_context.is_eager:
    # Graph mode: validate the list-valued attrs up front so the error
    # message names this op, then build the node via the op-def library.
    if not isinstance(output_types, (list, tuple)):
      raise TypeError(
          "Expected list for 'output_types' argument to "
          "'anonymous_iterator' Op, not %r." % output_types)
    output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
    if not isinstance(output_shapes, (list, tuple)):
      raise TypeError(
          "Expected list for 'output_shapes' argument to "
          "'anonymous_iterator' Op, not %r." % output_shapes)
    output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
    _, _, _op = _op_def_lib._apply_op_helper(
        "AnonymousIterator", output_types=output_types,
        output_shapes=output_shapes, name=name)
    _result = _op.outputs[:]
    _inputs_flat = _op.inputs
    # Re-read the attrs from the created op so the recorded values reflect
    # any canonicalization performed by _apply_op_helper.
    _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes",
              _op.get_attr("output_shapes"))
    _execute.record_gradient(
      "AnonymousIterator", _inputs_flat, _attrs, _result, name)
    # Single-output op: unwrap the one-element output list.
    _result, = _result
    return _result

  else:
    try:
      # Eager fast path: dispatch directly to the C layer. NOTE(review):
      # the positional layout (context handle, device, op name, name,
      # callbacks, inputs, attr key/value pairs) is order-sensitive —
      # do not reorder.
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._eager_context.device_name,
        "AnonymousIterator", name, _ctx._post_execution_callbacks,
        "output_types", output_types, "output_shapes", output_shapes)
      return _result
    except _core._FallbackException:
      # Fast path could not handle these arguments; use the slow path.
      return anonymous_iterator_eager_fallback(
          output_types=output_types, output_shapes=output_shapes, name=name,
          ctx=_ctx)
    except _core._NotOkStatusException as e:
      # Convert a non-OK C++ status into the matching Python exception,
      # tagging the op name for easier debugging.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
  80 
  81 
  82 def anonymous_iterator_eager_fallback(output_types, output_shapes, name=None, ctx=None):
  83   r"""This is the slowpath function for Eager mode.
  84   This is for function anonymous_iterator
  85   """
  86   _ctx = ctx if ctx else _context.context()
  87   if not isinstance(output_types, (list, tuple)):
  88     raise TypeError(
  89         "Expected list for 'output_types' argument to "
  90         "'anonymous_iterator' Op, not %r." % output_types)
  91   output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
  92   if not isinstance(output_shapes, (list, tuple)):
  93     raise TypeError(
  94         "Expected list for 'output_shapes' argument to "
  95         "'anonymous_iterator' Op, not %r." % output_shapes)
  96   output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  97   _inputs_flat = []
  98   _attrs = ("output_types", output_types, "output_shapes", output_shapes)
  99   _result = _execute.execute(b"AnonymousIterator", 1, inputs=_inputs_flat,
 100                              attrs=_attrs, ctx=_ctx, name=name)
 101   _execute.record_gradient(
 102       "AnonymousIterator", _inputs_flat, _attrs, _result, name)
 103   _result, = _result
 104   return _result
 105 
 106 
def batch_dataset(input_dataset, batch_size, output_types, output_shapes, name=None):
  r"""Creates a dataset that batches `batch_size` elements from `input_dataset`.

  Args:
    input_dataset: A `Tensor` of type `variant`.
    batch_size: A `Tensor` of type `int64`.
      A scalar representing the number of elements to accumulate in a
      batch.
    output_types: A list of `tf.DTypes` that has length `>= 1`.
    output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `variant`.
  """
  _ctx = _context._context
  if _ctx is None or not _ctx._eager_context.is_eager:
    # Graph mode: validate list-valued attrs, then build the node via the
    # op-def library.
    if not isinstance(output_types, (list, tuple)):
      raise TypeError(
          "Expected list for 'output_types' argument to "
          "'batch_dataset' Op, not %r." % output_types)
    output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
    if not isinstance(output_shapes, (list, tuple)):
      raise TypeError(
          "Expected list for 'output_shapes' argument to "
          "'batch_dataset' Op, not %r." % output_shapes)
    output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
    _, _, _op = _op_def_lib._apply_op_helper(
        "BatchDataset", input_dataset=input_dataset, batch_size=batch_size,
        output_types=output_types, output_shapes=output_shapes, name=name)
    _result = _op.outputs[:]
    _inputs_flat = _op.inputs
    # Re-read attrs from the created op to capture canonicalized values.
    _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes",
              _op.get_attr("output_shapes"))
    _execute.record_gradient(
      "BatchDataset", _inputs_flat, _attrs, _result, name)
    # Single-output op: unwrap the one-element output list.
    _result, = _result
    return _result

  else:
    try:
      # Eager fast path. NOTE(review): the positional argument layout is
      # order-sensitive (inputs precede attr key/value pairs) — do not
      # reorder.
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._eager_context.device_name, "BatchDataset",
        name, _ctx._post_execution_callbacks, input_dataset, batch_size,
        "output_types", output_types, "output_shapes", output_shapes)
      return _result
    except _core._FallbackException:
      # Fast path unsupported for these arguments; use the slow path.
      return batch_dataset_eager_fallback(
          input_dataset, batch_size, output_types=output_types,
          output_shapes=output_shapes, name=name, ctx=_ctx)
    except _core._NotOkStatusException as e:
      # Surface C++ op errors as Python exceptions, tagging the op name.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
 163 
 164 
 165 def batch_dataset_eager_fallback(input_dataset, batch_size, output_types, output_shapes, name=None, ctx=None):
 166   r"""This is the slowpath function for Eager mode.
 167   This is for function batch_dataset
 168   """
 169   _ctx = ctx if ctx else _context.context()
 170   if not isinstance(output_types, (list, tuple)):
 171     raise TypeError(
 172         "Expected list for 'output_types' argument to "
 173         "'batch_dataset' Op, not %r." % output_types)
 174   output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
 175   if not isinstance(output_shapes, (list, tuple)):
 176     raise TypeError(
 177         "Expected list for 'output_shapes' argument to "
 178         "'batch_dataset' Op, not %r." % output_shapes)
 179   output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
 180   input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
 181   batch_size = _ops.convert_to_tensor(batch_size, _dtypes.int64)
 182   _inputs_flat = [input_dataset, batch_size]
 183   _attrs = ("output_types", output_types, "output_shapes", output_shapes)
 184   _result = _execute.execute(b"BatchDataset", 1, inputs=_inputs_flat,
 185                              attrs=_attrs, ctx=_ctx, name=name)
 186   _execute.record_gradient(
 187       "BatchDataset", _inputs_flat, _attrs, _result, name)
 188   _result, = _result
 189   return _result
 190 
 191 
def batch_dataset_v2(input_dataset, batch_size, drop_remainder, output_types, output_shapes, name=None):
  r"""Creates a dataset that batches `batch_size` elements from `input_dataset`.

  Args:
    input_dataset: A `Tensor` of type `variant`.
    batch_size: A `Tensor` of type `int64`.
      A scalar representing the number of elements to accumulate in a batch.
    drop_remainder: A `Tensor` of type `bool`.
      A scalar representing whether the last batch should be dropped in case its size
      is smaller than desired.
    output_types: A list of `tf.DTypes` that has length `>= 1`.
    output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `variant`.
  """
  _ctx = _context._context
  if _ctx is None or not _ctx._eager_context.is_eager:
    # Graph mode: validate list-valued attrs, then build the node via the
    # op-def library.
    if not isinstance(output_types, (list, tuple)):
      raise TypeError(
          "Expected list for 'output_types' argument to "
          "'batch_dataset_v2' Op, not %r." % output_types)
    output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
    if not isinstance(output_shapes, (list, tuple)):
      raise TypeError(
          "Expected list for 'output_shapes' argument to "
          "'batch_dataset_v2' Op, not %r." % output_shapes)
    output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
    _, _, _op = _op_def_lib._apply_op_helper(
        "BatchDatasetV2", input_dataset=input_dataset, batch_size=batch_size,
        drop_remainder=drop_remainder, output_types=output_types,
        output_shapes=output_shapes, name=name)
    _result = _op.outputs[:]
    _inputs_flat = _op.inputs
    # Re-read attrs from the created op to capture canonicalized values.
    _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes",
              _op.get_attr("output_shapes"))
    _execute.record_gradient(
      "BatchDatasetV2", _inputs_flat, _attrs, _result, name)
    # Single-output op: unwrap the one-element output list.
    _result, = _result
    return _result

  else:
    try:
      # Eager fast path. NOTE(review): positional layout is
      # order-sensitive — inputs precede attr key/value pairs.
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._eager_context.device_name,
        "BatchDatasetV2", name, _ctx._post_execution_callbacks, input_dataset,
        batch_size, drop_remainder, "output_types", output_types,
        "output_shapes", output_shapes)
      return _result
    except _core._FallbackException:
      # Fast path unsupported for these arguments; use the slow path.
      return batch_dataset_v2_eager_fallback(
          input_dataset, batch_size, drop_remainder,
          output_types=output_types, output_shapes=output_shapes, name=name,
          ctx=_ctx)
    except _core._NotOkStatusException as e:
      # Surface C++ op errors as Python exceptions, tagging the op name.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
 253 
 254 
 255 def batch_dataset_v2_eager_fallback(input_dataset, batch_size, drop_remainder, output_types, output_shapes, name=None, ctx=None):
 256   r"""This is the slowpath function for Eager mode.
 257   This is for function batch_dataset_v2
 258   """
 259   _ctx = ctx if ctx else _context.context()
 260   if not isinstance(output_types, (list, tuple)):
 261     raise TypeError(
 262         "Expected list for 'output_types' argument to "
 263         "'batch_dataset_v2' Op, not %r." % output_types)
 264   output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
 265   if not isinstance(output_shapes, (list, tuple)):
 266     raise TypeError(
 267         "Expected list for 'output_shapes' argument to "
 268         "'batch_dataset_v2' Op, not %r." % output_shapes)
 269   output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
 270   input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
 271   batch_size = _ops.convert_to_tensor(batch_size, _dtypes.int64)
 272   drop_remainder = _ops.convert_to_tensor(drop_remainder, _dtypes.bool)
 273   _inputs_flat = [input_dataset, batch_size, drop_remainder]
 274   _attrs = ("output_types", output_types, "output_shapes", output_shapes)
 275   _result = _execute.execute(b"BatchDatasetV2", 1, inputs=_inputs_flat,
 276                              attrs=_attrs, ctx=_ctx, name=name)
 277   _execute.record_gradient(
 278       "BatchDatasetV2", _inputs_flat, _attrs, _result, name)
 279   _result, = _result
 280   return _result
 281 
 282 
def bytes_produced_stats_dataset(input_dataset, tag, output_types, output_shapes, name=None):
  r"""Records the bytes size of each element of `input_dataset` in a StatsAggregator.

  Args:
    input_dataset: A `Tensor` of type `variant`.
    tag: A `Tensor` of type `string`.
    output_types: A list of `tf.DTypes` that has length `>= 1`.
    output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `variant`.
  """
  _ctx = _context._context
  if _ctx is None or not _ctx._eager_context.is_eager:
    # Graph mode: validate list-valued attrs, then build the node via the
    # op-def library.
    if not isinstance(output_types, (list, tuple)):
      raise TypeError(
          "Expected list for 'output_types' argument to "
          "'bytes_produced_stats_dataset' Op, not %r." % output_types)
    output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
    if not isinstance(output_shapes, (list, tuple)):
      raise TypeError(
          "Expected list for 'output_shapes' argument to "
          "'bytes_produced_stats_dataset' Op, not %r." % output_shapes)
    output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
    _, _, _op = _op_def_lib._apply_op_helper(
        "BytesProducedStatsDataset", input_dataset=input_dataset, tag=tag,
        output_types=output_types, output_shapes=output_shapes, name=name)
    _result = _op.outputs[:]
    _inputs_flat = _op.inputs
    # Re-read attrs from the created op to capture canonicalized values.
    _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes",
              _op.get_attr("output_shapes"))
    _execute.record_gradient(
      "BytesProducedStatsDataset", _inputs_flat, _attrs, _result, name)
    # Single-output op: unwrap the one-element output list.
    _result, = _result
    return _result

  else:
    try:
      # Eager fast path. NOTE(review): positional layout is
      # order-sensitive — inputs precede attr key/value pairs.
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._eager_context.device_name,
        "BytesProducedStatsDataset", name, _ctx._post_execution_callbacks,
        input_dataset, tag, "output_types", output_types, "output_shapes",
        output_shapes)
      return _result
    except _core._FallbackException:
      # Fast path unsupported for these arguments; use the slow path.
      return bytes_produced_stats_dataset_eager_fallback(
          input_dataset, tag, output_types=output_types,
          output_shapes=output_shapes, name=name, ctx=_ctx)
    except _core._NotOkStatusException as e:
      # Surface C++ op errors as Python exceptions, tagging the op name.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
 338 
 339 
 340 def bytes_produced_stats_dataset_eager_fallback(input_dataset, tag, output_types, output_shapes, name=None, ctx=None):
 341   r"""This is the slowpath function for Eager mode.
 342   This is for function bytes_produced_stats_dataset
 343   """
 344   _ctx = ctx if ctx else _context.context()
 345   if not isinstance(output_types, (list, tuple)):
 346     raise TypeError(
 347         "Expected list for 'output_types' argument to "
 348         "'bytes_produced_stats_dataset' Op, not %r." % output_types)
 349   output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
 350   if not isinstance(output_shapes, (list, tuple)):
 351     raise TypeError(
 352         "Expected list for 'output_shapes' argument to "
 353         "'bytes_produced_stats_dataset' Op, not %r." % output_shapes)
 354   output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
 355   input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
 356   tag = _ops.convert_to_tensor(tag, _dtypes.string)
 357   _inputs_flat = [input_dataset, tag]
 358   _attrs = ("output_types", output_types, "output_shapes", output_shapes)
 359   _result = _execute.execute(b"BytesProducedStatsDataset", 1,
 360                              inputs=_inputs_flat, attrs=_attrs, ctx=_ctx,
 361                              name=name)
 362   _execute.record_gradient(
 363       "BytesProducedStatsDataset", _inputs_flat, _attrs, _result, name)
 364   _result, = _result
 365   return _result
 366 
 367 
def cache_dataset(input_dataset, filename, output_types, output_shapes, name=None):
  r"""Creates a dataset that caches elements from `input_dataset`.

  A CacheDataset will iterate over the input_dataset, and store tensors. If the
  cache already exists, the cache will be used. If the cache is inappropriate
  (e.g. cannot be opened, contains tensors of the wrong shape / size), an error
  will the returned when used.

  Args:
    input_dataset: A `Tensor` of type `variant`.
    filename: A `Tensor` of type `string`.
      A path on the filesystem where we should cache the dataset. Note: this
      will be a directory.
    output_types: A list of `tf.DTypes` that has length `>= 1`.
    output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `variant`.
  """
  _ctx = _context._context
  if _ctx is None or not _ctx._eager_context.is_eager:
    # Graph mode: validate list-valued attrs, then build the node via the
    # op-def library.
    if not isinstance(output_types, (list, tuple)):
      raise TypeError(
          "Expected list for 'output_types' argument to "
          "'cache_dataset' Op, not %r." % output_types)
    output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
    if not isinstance(output_shapes, (list, tuple)):
      raise TypeError(
          "Expected list for 'output_shapes' argument to "
          "'cache_dataset' Op, not %r." % output_shapes)
    output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
    _, _, _op = _op_def_lib._apply_op_helper(
        "CacheDataset", input_dataset=input_dataset, filename=filename,
        output_types=output_types, output_shapes=output_shapes, name=name)
    _result = _op.outputs[:]
    _inputs_flat = _op.inputs
    # Re-read attrs from the created op to capture canonicalized values.
    _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes",
              _op.get_attr("output_shapes"))
    _execute.record_gradient(
      "CacheDataset", _inputs_flat, _attrs, _result, name)
    # Single-output op: unwrap the one-element output list.
    _result, = _result
    return _result

  else:
    try:
      # Eager fast path. NOTE(review): positional layout is
      # order-sensitive — inputs precede attr key/value pairs.
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._eager_context.device_name, "CacheDataset",
        name, _ctx._post_execution_callbacks, input_dataset, filename,
        "output_types", output_types, "output_shapes", output_shapes)
      return _result
    except _core._FallbackException:
      # Fast path unsupported for these arguments; use the slow path.
      return cache_dataset_eager_fallback(
          input_dataset, filename, output_types=output_types,
          output_shapes=output_shapes, name=name, ctx=_ctx)
    except _core._NotOkStatusException as e:
      # Surface C++ op errors as Python exceptions, tagging the op name.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
 429 
 430 
 431 def cache_dataset_eager_fallback(input_dataset, filename, output_types, output_shapes, name=None, ctx=None):
 432   r"""This is the slowpath function for Eager mode.
 433   This is for function cache_dataset
 434   """
 435   _ctx = ctx if ctx else _context.context()
 436   if not isinstance(output_types, (list, tuple)):
 437     raise TypeError(
 438         "Expected list for 'output_types' argument to "
 439         "'cache_dataset' Op, not %r." % output_types)
 440   output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
 441   if not isinstance(output_shapes, (list, tuple)):
 442     raise TypeError(
 443         "Expected list for 'output_shapes' argument to "
 444         "'cache_dataset' Op, not %r." % output_shapes)
 445   output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
 446   input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
 447   filename = _ops.convert_to_tensor(filename, _dtypes.string)
 448   _inputs_flat = [input_dataset, filename]
 449   _attrs = ("output_types", output_types, "output_shapes", output_shapes)
 450   _result = _execute.execute(b"CacheDataset", 1, inputs=_inputs_flat,
 451                              attrs=_attrs, ctx=_ctx, name=name)
 452   _execute.record_gradient(
 453       "CacheDataset", _inputs_flat, _attrs, _result, name)
 454   _result, = _result
 455   return _result
 456 
 457 
def concatenate_dataset(input_dataset, another_dataset, output_types, output_shapes, name=None):
  r"""Creates a dataset that concatenates `input_dataset` with `another_dataset`.

  Args:
    input_dataset: A `Tensor` of type `variant`.
    another_dataset: A `Tensor` of type `variant`.
    output_types: A list of `tf.DTypes` that has length `>= 1`.
    output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `variant`.
  """
  _ctx = _context._context
  if _ctx is None or not _ctx._eager_context.is_eager:
    # Graph mode: validate list-valued attrs, then build the node via the
    # op-def library.
    if not isinstance(output_types, (list, tuple)):
      raise TypeError(
          "Expected list for 'output_types' argument to "
          "'concatenate_dataset' Op, not %r." % output_types)
    output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
    if not isinstance(output_shapes, (list, tuple)):
      raise TypeError(
          "Expected list for 'output_shapes' argument to "
          "'concatenate_dataset' Op, not %r." % output_shapes)
    output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
    _, _, _op = _op_def_lib._apply_op_helper(
        "ConcatenateDataset", input_dataset=input_dataset,
        another_dataset=another_dataset, output_types=output_types,
        output_shapes=output_shapes, name=name)
    _result = _op.outputs[:]
    _inputs_flat = _op.inputs
    # Re-read attrs from the created op to capture canonicalized values.
    _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes",
              _op.get_attr("output_shapes"))
    _execute.record_gradient(
      "ConcatenateDataset", _inputs_flat, _attrs, _result, name)
    # Single-output op: unwrap the one-element output list.
    _result, = _result
    return _result

  else:
    try:
      # Eager fast path. NOTE(review): positional layout is
      # order-sensitive — inputs precede attr key/value pairs.
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._eager_context.device_name,
        "ConcatenateDataset", name, _ctx._post_execution_callbacks,
        input_dataset, another_dataset, "output_types", output_types,
        "output_shapes", output_shapes)
      return _result
    except _core._FallbackException:
      # Fast path unsupported for these arguments; use the slow path.
      return concatenate_dataset_eager_fallback(
          input_dataset, another_dataset, output_types=output_types,
          output_shapes=output_shapes, name=name, ctx=_ctx)
    except _core._NotOkStatusException as e:
      # Surface C++ op errors as Python exceptions, tagging the op name.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
 514 
 515 
 516 def concatenate_dataset_eager_fallback(input_dataset, another_dataset, output_types, output_shapes, name=None, ctx=None):
 517   r"""This is the slowpath function for Eager mode.
 518   This is for function concatenate_dataset
 519   """
 520   _ctx = ctx if ctx else _context.context()
 521   if not isinstance(output_types, (list, tuple)):
 522     raise TypeError(
 523         "Expected list for 'output_types' argument to "
 524         "'concatenate_dataset' Op, not %r." % output_types)
 525   output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
 526   if not isinstance(output_shapes, (list, tuple)):
 527     raise TypeError(
 528         "Expected list for 'output_shapes' argument to "
 529         "'concatenate_dataset' Op, not %r." % output_shapes)
 530   output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
 531   input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
 532   another_dataset = _ops.convert_to_tensor(another_dataset, _dtypes.variant)
 533   _inputs_flat = [input_dataset, another_dataset]
 534   _attrs = ("output_types", output_types, "output_shapes", output_shapes)
 535   _result = _execute.execute(b"ConcatenateDataset", 1, inputs=_inputs_flat,
 536                              attrs=_attrs, ctx=_ctx, name=name)
 537   _execute.record_gradient(
 538       "ConcatenateDataset", _inputs_flat, _attrs, _result, name)
 539   _result, = _result
 540   return _result
 541 
 542 
def dataset_to_graph(input_dataset, name=None):
  r"""Returns a serialized GraphDef representing `input_dataset`.

  Returns a graph representation for `input_dataset`.

  Args:
    input_dataset: A `Tensor` of type `variant`.
      A variant tensor representing the dataset to return the graph representation for.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `string`.
  """
  _ctx = _context._context
  if _ctx is None or not _ctx._eager_context.is_eager:
    # Graph mode: this op has no attrs, so no validation is needed before
    # building the node via the op-def library.
    _, _, _op = _op_def_lib._apply_op_helper(
        "DatasetToGraph", input_dataset=input_dataset, name=name)
    _result = _op.outputs[:]
    _inputs_flat = _op.inputs
    _attrs = None
    _execute.record_gradient(
      "DatasetToGraph", _inputs_flat, _attrs, _result, name)
    # Single-output op: unwrap the one-element output list.
    _result, = _result
    return _result

  else:
    try:
      # Eager fast path. NOTE(review): positional layout is
      # order-sensitive — do not reorder.
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._eager_context.device_name,
        "DatasetToGraph", name, _ctx._post_execution_callbacks, input_dataset)
      return _result
    except _core._FallbackException:
      # Fast path unsupported for these arguments; use the slow path.
      return dataset_to_graph_eager_fallback(
          input_dataset, name=name, ctx=_ctx)
    except _core._NotOkStatusException as e:
      # Surface C++ op errors as Python exceptions, tagging the op name.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
 583 
 584 
 585 def dataset_to_graph_eager_fallback(input_dataset, name=None, ctx=None):
 586   r"""This is the slowpath function for Eager mode.
 587   This is for function dataset_to_graph
 588   """
 589   _ctx = ctx if ctx else _context.context()
 590   input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
 591   _inputs_flat = [input_dataset]
 592   _attrs = None
 593   _result = _execute.execute(b"DatasetToGraph", 1, inputs=_inputs_flat,
 594                              attrs=_attrs, ctx=_ctx, name=name)
 595   _execute.record_gradient(
 596       "DatasetToGraph", _inputs_flat, _attrs, _result, name)
 597   _result, = _result
 598   return _result
 599 
 600 
def dataset_to_single_element(dataset, output_types, output_shapes, name=None):
  r"""Outputs the single element from the given dataset.

  Args:
    dataset: A `Tensor` of type `variant`.
      A handle to a dataset that contains a single element.
    output_types: A list of `tf.DTypes` that has length `>= 1`.
    output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
    name: A name for the operation (optional).

  Returns:
    A list of `Tensor` objects of type `output_types`.
  """
  _ctx = _context._context
  if _ctx is None or not _ctx._eager_context.is_eager:
    # Graph mode: validate list-valued attrs, then build the node via the
    # op-def library.
    if not isinstance(output_types, (list, tuple)):
      raise TypeError(
          "Expected list for 'output_types' argument to "
          "'dataset_to_single_element' Op, not %r." % output_types)
    output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
    if not isinstance(output_shapes, (list, tuple)):
      raise TypeError(
          "Expected list for 'output_shapes' argument to "
          "'dataset_to_single_element' Op, not %r." % output_shapes)
    output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
    _, _, _op = _op_def_lib._apply_op_helper(
        "DatasetToSingleElement", dataset=dataset, output_types=output_types,
        output_shapes=output_shapes, name=name)
    _result = _op.outputs[:]
    _inputs_flat = _op.inputs
    # Re-read attrs from the created op to capture canonicalized values.
    _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes",
              _op.get_attr("output_shapes"))
    _execute.record_gradient(
      "DatasetToSingleElement", _inputs_flat, _attrs, _result, name)
    # Multi-output op: return the full output list (no unpacking).
    return _result

  else:
    try:
      # Eager fast path. NOTE(review): positional layout is
      # order-sensitive — inputs precede attr key/value pairs.
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._eager_context.device_name,
        "DatasetToSingleElement", name, _ctx._post_execution_callbacks,
        dataset, "output_types", output_types, "output_shapes", output_shapes)
      return _result
    except _core._FallbackException:
      # Fast path unsupported for these arguments; use the slow path.
      return dataset_to_single_element_eager_fallback(
          dataset, output_types=output_types, output_shapes=output_shapes,
          name=name, ctx=_ctx)
    except _core._NotOkStatusException as e:
      # Surface C++ op errors as Python exceptions, tagging the op name.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
 654 
 655 
 656 def dataset_to_single_element_eager_fallback(dataset, output_types, output_shapes, name=None, ctx=None):
 657   r"""This is the slowpath function for Eager mode.
 658   This is for function dataset_to_single_element
 659   """
 660   _ctx = ctx if ctx else _context.context()
 661   if not isinstance(output_types, (list, tuple)):
 662     raise TypeError(
 663         "Expected list for 'output_types' argument to "
 664         "'dataset_to_single_element' Op, not %r." % output_types)
 665   output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
 666   if not isinstance(output_shapes, (list, tuple)):
 667     raise TypeError(
 668         "Expected list for 'output_shapes' argument to "
 669         "'dataset_to_single_element' Op, not %r." % output_shapes)
 670   output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
 671   dataset = _ops.convert_to_tensor(dataset, _dtypes.variant)
 672   _inputs_flat = [dataset]
 673   _attrs = ("output_types", output_types, "output_shapes", output_shapes)
 674   _result = _execute.execute(b"DatasetToSingleElement", len(output_types),
 675                              inputs=_inputs_flat, attrs=_attrs, ctx=_ctx,
 676                              name=name)
 677   _execute.record_gradient(
 678       "DatasetToSingleElement", _inputs_flat, _attrs, _result, name)
 679   return _result
 680 
 681 
 682 def dataset_to_tf_record(input_dataset, filename, compression_type, name=None):
 683   r"""Writes the given dataset to the given file using the TFRecord format.
 684 
 685   Args:
 686     input_dataset: A `Tensor` of type `variant`.
 687       A variant tensor representing the dataset to write.
 688     filename: A `Tensor` of type `string`.
 689       A scalar string tensor representing the filename to use.
 690     compression_type: A `Tensor` of type `string`.
 691       A scalar string tensor containing either (i) the empty string (no
 692       compression), (ii) "ZLIB", or (iii) "GZIP".
 693     name: A name for the operation (optional).
 694 
 695   Returns:
 696     The created Operation.
 697   """
 698   _ctx = _context._context
 699   if _ctx is None or not _ctx._eager_context.is_eager:
 700     _, _, _op = _op_def_lib._apply_op_helper(
 701         "DatasetToTFRecord", input_dataset=input_dataset, filename=filename,
 702         compression_type=compression_type, name=name)
 703     return _op
 704     _result = None
 705     return _result
 706 
 707   else:
 708     try:
 709       _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
 710         _ctx._context_handle, _ctx._eager_context.device_name,
 711         "DatasetToTFRecord", name, _ctx._post_execution_callbacks,
 712         input_dataset, filename, compression_type)
 713       return _result
 714     except _core._FallbackException:
 715       return dataset_to_tf_record_eager_fallback(
 716           input_dataset, filename, compression_type, name=name, ctx=_ctx)
 717     except _core._NotOkStatusException as e:
 718       if name is not None:
 719         message = e.message + " name: " + name
 720       else:
 721         message = e.message
 722       _six.raise_from(_core._status_to_exception(e.code, message), None)
 723 
 724 
 725 def dataset_to_tf_record_eager_fallback(input_dataset, filename, compression_type, name=None, ctx=None):
 726   r"""This is the slowpath function for Eager mode.
 727   This is for function dataset_to_tf_record
 728   """
 729   _ctx = ctx if ctx else _context.context()
 730   input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
 731   filename = _ops.convert_to_tensor(filename, _dtypes.string)
 732   compression_type = _ops.convert_to_tensor(compression_type, _dtypes.string)
 733   _inputs_flat = [input_dataset, filename, compression_type]
 734   _attrs = None
 735   _result = _execute.execute(b"DatasetToTFRecord", 0, inputs=_inputs_flat,
 736                              attrs=_attrs, ctx=_ctx, name=name)
 737   _result = None
 738   return _result
 739 
 740 
def dense_to_sparse_batch_dataset(input_dataset, batch_size, row_shape, output_types, output_shapes, name=None):
  r"""Creates a dataset that batches input elements into a SparseTensor.

  Args:
    input_dataset: A `Tensor` of type `variant`.
      A handle to an input dataset. Must have a single component.
    batch_size: A `Tensor` of type `int64`.
      A scalar representing the number of elements to accumulate in a
      batch.
    row_shape: A `Tensor` of type `int64`.
      A vector representing the dense shape of each row in the produced
      SparseTensor. The shape may be partially specified, using `-1` to indicate
      that a particular dimension should use the maximum size of all batch elements.
    output_types: A list of `tf.DTypes` that has length `>= 1`.
    output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `variant`.
  """
  _ctx = _context._context
  if _ctx is None or not _ctx._eager_context.is_eager:
    # Graph mode: validate the list-valued attrs up front so the error
    # names this op, then canonicalize them before building the op.
    if not isinstance(output_types, (list, tuple)):
      raise TypeError(
          "Expected list for 'output_types' argument to "
          "'dense_to_sparse_batch_dataset' Op, not %r." % output_types)
    output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
    if not isinstance(output_shapes, (list, tuple)):
      raise TypeError(
          "Expected list for 'output_shapes' argument to "
          "'dense_to_sparse_batch_dataset' Op, not %r." % output_shapes)
    output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
    # Register the op in the current graph.
    _, _, _op = _op_def_lib._apply_op_helper(
        "DenseToSparseBatchDataset", input_dataset=input_dataset,
        batch_size=batch_size, row_shape=row_shape, output_types=output_types,
        output_shapes=output_shapes, name=name)
    _result = _op.outputs[:]
    _inputs_flat = _op.inputs
    # Read the attrs back from the created op so the gradient record sees
    # the canonical values.
    _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes",
              _op.get_attr("output_shapes"))
    _execute.record_gradient(
      "DenseToSparseBatchDataset", _inputs_flat, _attrs, _result, name)
    # Exactly one output: the dataset variant tensor.
    _result, = _result
    return _result

  else:
    try:
      # Eager mode: try the C-level fast path first.
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._eager_context.device_name,
        "DenseToSparseBatchDataset", name, _ctx._post_execution_callbacks,
        input_dataset, batch_size, row_shape, "output_types", output_types,
        "output_shapes", output_shapes)
      return _result
    except _core._FallbackException:
      # Inputs the fast path cannot handle; use the Python slow path.
      return dense_to_sparse_batch_dataset_eager_fallback(
          input_dataset, batch_size, row_shape, output_types=output_types,
          output_shapes=output_shapes, name=name, ctx=_ctx)
    except _core._NotOkStatusException as e:
      # Translate the C++ status into the matching Python exception.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
 804 
 805 
 806 def dense_to_sparse_batch_dataset_eager_fallback(input_dataset, batch_size, row_shape, output_types, output_shapes, name=None, ctx=None):
 807   r"""This is the slowpath function for Eager mode.
 808   This is for function dense_to_sparse_batch_dataset
 809   """
 810   _ctx = ctx if ctx else _context.context()
 811   if not isinstance(output_types, (list, tuple)):
 812     raise TypeError(
 813         "Expected list for 'output_types' argument to "
 814         "'dense_to_sparse_batch_dataset' Op, not %r." % output_types)
 815   output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
 816   if not isinstance(output_shapes, (list, tuple)):
 817     raise TypeError(
 818         "Expected list for 'output_shapes' argument to "
 819         "'dense_to_sparse_batch_dataset' Op, not %r." % output_shapes)
 820   output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
 821   input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
 822   batch_size = _ops.convert_to_tensor(batch_size, _dtypes.int64)
 823   row_shape = _ops.convert_to_tensor(row_shape, _dtypes.int64)
 824   _inputs_flat = [input_dataset, batch_size, row_shape]
 825   _attrs = ("output_types", output_types, "output_shapes", output_shapes)
 826   _result = _execute.execute(b"DenseToSparseBatchDataset", 1,
 827                              inputs=_inputs_flat, attrs=_attrs, ctx=_ctx,
 828                              name=name)
 829   _execute.record_gradient(
 830       "DenseToSparseBatchDataset", _inputs_flat, _attrs, _result, name)
 831   _result, = _result
 832   return _result
 833 
 834 
 835 def deserialize_iterator(resource_handle, serialized, name=None):
 836   r"""Converts the given variant tensor to an iterator and stores it in the given resource.
 837 
 838   Args:
 839     resource_handle: A `Tensor` of type `resource`.
 840       A handle to an iterator resource.
 841     serialized: A `Tensor` of type `variant`.
 842       A variant tensor storing the state of the iterator contained in the
 843       resource.
 844     name: A name for the operation (optional).
 845 
 846   Returns:
 847     The created Operation.
 848   """
 849   _ctx = _context._context
 850   if _ctx is None or not _ctx._eager_context.is_eager:
 851     _, _, _op = _op_def_lib._apply_op_helper(
 852         "DeserializeIterator", resource_handle=resource_handle,
 853         serialized=serialized, name=name)
 854     return _op
 855     _result = None
 856     return _result
 857 
 858   else:
 859     try:
 860       _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
 861         _ctx._context_handle, _ctx._eager_context.device_name,
 862         "DeserializeIterator", name, _ctx._post_execution_callbacks,
 863         resource_handle, serialized)
 864       return _result
 865     except _core._FallbackException:
 866       return deserialize_iterator_eager_fallback(
 867           resource_handle, serialized, name=name, ctx=_ctx)
 868     except _core._NotOkStatusException as e:
 869       if name is not None:
 870         message = e.message + " name: " + name
 871       else:
 872         message = e.message
 873       _six.raise_from(_core._status_to_exception(e.code, message), None)
 874 
 875 
 876 def deserialize_iterator_eager_fallback(resource_handle, serialized, name=None, ctx=None):
 877   r"""This is the slowpath function for Eager mode.
 878   This is for function deserialize_iterator
 879   """
 880   _ctx = ctx if ctx else _context.context()
 881   resource_handle = _ops.convert_to_tensor(resource_handle, _dtypes.resource)
 882   serialized = _ops.convert_to_tensor(serialized, _dtypes.variant)
 883   _inputs_flat = [resource_handle, serialized]
 884   _attrs = None
 885   _result = _execute.execute(b"DeserializeIterator", 0, inputs=_inputs_flat,
 886                              attrs=_attrs, ctx=_ctx, name=name)
 887   _result = None
 888   return _result
 889 
 890 
 891 def enqueue_in_queue_dataset(queue, components, name=None):
 892   r"""TODO: add doc.
 893 
 894   Args:
 895     queue: A `Tensor` of type `variant`.
 896     components: A list of `Tensor` objects.
 897     name: A name for the operation (optional).
 898 
 899   Returns:
 900     The created Operation.
 901   """
 902   _ctx = _context._context
 903   if _ctx is None or not _ctx._eager_context.is_eager:
 904     _, _, _op = _op_def_lib._apply_op_helper(
 905         "EnqueueInQueueDataset", queue=queue, components=components,
 906         name=name)
 907     return _op
 908     _result = None
 909     return _result
 910 
 911   else:
 912     try:
 913       _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
 914         _ctx._context_handle, _ctx._eager_context.device_name,
 915         "EnqueueInQueueDataset", name, _ctx._post_execution_callbacks, queue,
 916         components)
 917       return _result
 918     except _core._FallbackException:
 919       return enqueue_in_queue_dataset_eager_fallback(
 920           queue, components, name=name, ctx=_ctx)
 921     except _core._NotOkStatusException as e:
 922       if name is not None:
 923         message = e.message + " name: " + name
 924       else:
 925         message = e.message
 926       _six.raise_from(_core._status_to_exception(e.code, message), None)
 927 
 928 
 929 def enqueue_in_queue_dataset_eager_fallback(queue, components, name=None, ctx=None):
 930   r"""This is the slowpath function for Eager mode.
 931   This is for function enqueue_in_queue_dataset
 932   """
 933   _ctx = ctx if ctx else _context.context()
 934   _attr_Tcomponents, components = _execute.convert_to_mixed_eager_tensors(components, _ctx)
 935   queue = _ops.convert_to_tensor(queue, _dtypes.variant)
 936   _inputs_flat = [queue] + list(components)
 937   _attrs = ("Tcomponents", _attr_Tcomponents)
 938   _result = _execute.execute(b"EnqueueInQueueDataset", 0, inputs=_inputs_flat,
 939                              attrs=_attrs, ctx=_ctx, name=name)
 940   _result = None
 941   return _result
 942 
 943 
def filter_by_last_component_dataset(input_dataset, output_types, output_shapes, name=None):
  r"""Creates a dataset containing elements of first component of `input_dataset` having true in the last component.

  Args:
    input_dataset: A `Tensor` of type `variant`.
    output_types: A list of `tf.DTypes` that has length `>= 1`.
    output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `variant`.
  """
  _ctx = _context._context
  if _ctx is None or not _ctx._eager_context.is_eager:
    # Graph mode: validate the list-valued attrs up front, then
    # canonicalize them before building the op.
    if not isinstance(output_types, (list, tuple)):
      raise TypeError(
          "Expected list for 'output_types' argument to "
          "'filter_by_last_component_dataset' Op, not %r." % output_types)
    output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
    if not isinstance(output_shapes, (list, tuple)):
      raise TypeError(
          "Expected list for 'output_shapes' argument to "
          "'filter_by_last_component_dataset' Op, not %r." % output_shapes)
    output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
    # Register the op in the current graph.
    _, _, _op = _op_def_lib._apply_op_helper(
        "FilterByLastComponentDataset", input_dataset=input_dataset,
        output_types=output_types, output_shapes=output_shapes, name=name)
    _result = _op.outputs[:]
    _inputs_flat = _op.inputs
    # Read attrs back from the op so the gradient record sees canonical values.
    _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes",
              _op.get_attr("output_shapes"))
    _execute.record_gradient(
      "FilterByLastComponentDataset", _inputs_flat, _attrs, _result, name)
    # Exactly one output: the dataset variant tensor.
    _result, = _result
    return _result

  else:
    try:
      # Eager mode: try the C-level fast path first.
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._eager_context.device_name,
        "FilterByLastComponentDataset", name, _ctx._post_execution_callbacks,
        input_dataset, "output_types", output_types, "output_shapes",
        output_shapes)
      return _result
    except _core._FallbackException:
      # Inputs the fast path cannot handle; use the Python slow path.
      return filter_by_last_component_dataset_eager_fallback(
          input_dataset, output_types=output_types,
          output_shapes=output_shapes, name=name, ctx=_ctx)
    except _core._NotOkStatusException as e:
      # Translate the C++ status into the matching Python exception.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
 998 
 999 
def filter_by_last_component_dataset_eager_fallback(input_dataset, output_types, output_shapes, name=None, ctx=None):
  r"""Eager-mode slow path for the FilterByLastComponentDataset op.

  Validates and canonicalizes the type/shape attrs, executes the op, and
  returns the resulting dataset variant tensor.
  """
  eager_ctx = ctx if ctx else _context.context()
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'filter_by_last_component_dataset' Op, not %r." % output_types)
  output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'filter_by_last_component_dataset' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  flat_inputs = [_ops.convert_to_tensor(input_dataset, _dtypes.variant)]
  op_attrs = ("output_types", output_types, "output_shapes", output_shapes)
  results = _execute.execute(b"FilterByLastComponentDataset", 1,
                             inputs=flat_inputs, attrs=op_attrs,
                             ctx=eager_ctx, name=name)
  _execute.record_gradient(
      "FilterByLastComponentDataset", flat_inputs, op_attrs, results, name)
  # Single output: the filtered dataset variant.
  return results[0]
1025 
1026 
def filter_dataset(input_dataset, other_arguments, predicate, output_types, output_shapes, name=None):
  r"""Creates a dataset containing elements of `input_dataset` matching `predicate`.

  The `predicate` function must return a scalar boolean and accept the
  following arguments:

  * One tensor for each component of an element of `input_dataset`.
  * One tensor for each value in `other_arguments`.

  Args:
    input_dataset: A `Tensor` of type `variant`.
    other_arguments: A list of `Tensor` objects.
      A list of tensors, typically values that were captured when
      building a closure for `predicate`.
    predicate: A function decorated with @Defun.
      A function returning a scalar boolean.
    output_types: A list of `tf.DTypes` that has length `>= 1`.
    output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `variant`.
  """
  _ctx = _context._context
  if _ctx is None or not _ctx._eager_context.is_eager:
    # Graph mode: validate the list-valued attrs up front, then
    # canonicalize them before building the op.
    if not isinstance(output_types, (list, tuple)):
      raise TypeError(
          "Expected list for 'output_types' argument to "
          "'filter_dataset' Op, not %r." % output_types)
    output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
    if not isinstance(output_shapes, (list, tuple)):
      raise TypeError(
          "Expected list for 'output_shapes' argument to "
          "'filter_dataset' Op, not %r." % output_shapes)
    output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
    # Register the op in the current graph; `Targuments` is inferred from
    # `other_arguments` by the op-def library.
    _, _, _op = _op_def_lib._apply_op_helper(
        "FilterDataset", input_dataset=input_dataset,
        other_arguments=other_arguments, predicate=predicate,
        output_types=output_types, output_shapes=output_shapes, name=name)
    _result = _op.outputs[:]
    _inputs_flat = _op.inputs
    # Read attrs back from the op so the gradient record sees canonical values.
    _attrs = ("predicate", _op.get_attr("predicate"), "Targuments",
              _op.get_attr("Targuments"), "output_types",
              _op.get_attr("output_types"), "output_shapes",
              _op.get_attr("output_shapes"))
    _execute.record_gradient(
      "FilterDataset", _inputs_flat, _attrs, _result, name)
    # Exactly one output: the dataset variant tensor.
    _result, = _result
    return _result

  else:
    try:
      # Eager mode: try the C-level fast path first.
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._eager_context.device_name,
        "FilterDataset", name, _ctx._post_execution_callbacks, input_dataset,
        other_arguments, "predicate", predicate, "output_types", output_types,
        "output_shapes", output_shapes)
      return _result
    except _core._FallbackException:
      # Inputs the fast path cannot handle; use the Python slow path.
      return filter_dataset_eager_fallback(
          input_dataset, other_arguments, predicate=predicate,
          output_types=output_types, output_shapes=output_shapes, name=name,
          ctx=_ctx)
    except _core._NotOkStatusException as e:
      # Translate the C++ status into the matching Python exception.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
1096 
1097 
def filter_dataset_eager_fallback(input_dataset, other_arguments, predicate, output_types, output_shapes, name=None, ctx=None):
  r"""This is the slowpath function for Eager mode.
  This is for function filter_dataset
  """
  _ctx = ctx if ctx else _context.context()
  # Validate then canonicalize the list-valued attrs.
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'filter_dataset' Op, not %r." % output_types)
  output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'filter_dataset' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  # Captured arguments may have mixed dtypes; infer `Targuments` from them.
  _attr_Targuments, other_arguments = _execute.convert_to_mixed_eager_tensors(other_arguments, _ctx)
  input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
  _inputs_flat = [input_dataset] + list(other_arguments)
  _attrs = ("predicate", predicate, "Targuments", _attr_Targuments,
  "output_types", output_types, "output_shapes", output_shapes)
  # Exactly one output: the filtered dataset variant.
  _result = _execute.execute(b"FilterDataset", 1, inputs=_inputs_flat,
                             attrs=_attrs, ctx=_ctx, name=name)
  _execute.record_gradient(
      "FilterDataset", _inputs_flat, _attrs, _result, name)
  _result, = _result
  return _result
1124 
1125 
def fixed_length_record_dataset(filenames, header_bytes, record_bytes, footer_bytes, buffer_size, name=None):
  r"""Creates a dataset that emits the records from one or more binary files.

  Args:
    filenames: A `Tensor` of type `string`.
      A scalar or a vector containing the name(s) of the file(s) to be
      read.
    header_bytes: A `Tensor` of type `int64`.
      A scalar representing the number of bytes to skip at the
      beginning of a file.
    record_bytes: A `Tensor` of type `int64`.
      A scalar representing the number of bytes in each record.
    footer_bytes: A `Tensor` of type `int64`.
      A scalar representing the number of bytes to skip at the end
      of a file.
    buffer_size: A `Tensor` of type `int64`.
      A scalar representing the number of bytes to buffer. Must be > 0.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `variant`.
  """
  _ctx = _context._context
  if _ctx is None or not _ctx._eager_context.is_eager:
    # Graph mode: register the op in the current graph. This op has no
    # attrs to validate.
    _, _, _op = _op_def_lib._apply_op_helper(
        "FixedLengthRecordDataset", filenames=filenames,
        header_bytes=header_bytes, record_bytes=record_bytes,
        footer_bytes=footer_bytes, buffer_size=buffer_size, name=name)
    _result = _op.outputs[:]
    _inputs_flat = _op.inputs
    _attrs = None
    _execute.record_gradient(
      "FixedLengthRecordDataset", _inputs_flat, _attrs, _result, name)
    # Exactly one output: the dataset variant tensor.
    _result, = _result
    return _result

  else:
    try:
      # Eager mode: try the C-level fast path first.
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._eager_context.device_name,
        "FixedLengthRecordDataset", name, _ctx._post_execution_callbacks,
        filenames, header_bytes, record_bytes, footer_bytes, buffer_size)
      return _result
    except _core._FallbackException:
      # Inputs the fast path cannot handle; use the Python slow path.
      return fixed_length_record_dataset_eager_fallback(
          filenames, header_bytes, record_bytes, footer_bytes, buffer_size,
          name=name, ctx=_ctx)
    except _core._NotOkStatusException as e:
      # Translate the C++ status into the matching Python exception.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
1179 
1180 
def fixed_length_record_dataset_eager_fallback(filenames, header_bytes, record_bytes, footer_bytes, buffer_size, name=None, ctx=None):
  r"""Eager-mode slow path for the FixedLengthRecordDataset op.

  Converts the five inputs to tensors of their fixed dtypes, executes the
  op, and returns the resulting dataset variant tensor.
  """
  eager_ctx = ctx if ctx else _context.context()
  flat_inputs = [
      _ops.convert_to_tensor(filenames, _dtypes.string),
      _ops.convert_to_tensor(header_bytes, _dtypes.int64),
      _ops.convert_to_tensor(record_bytes, _dtypes.int64),
      _ops.convert_to_tensor(footer_bytes, _dtypes.int64),
      _ops.convert_to_tensor(buffer_size, _dtypes.int64),
  ]
  # This op has no attrs and exactly one output.
  results = _execute.execute(b"FixedLengthRecordDataset", 1,
                             inputs=flat_inputs, attrs=None, ctx=eager_ctx,
                             name=name)
  _execute.record_gradient(
      "FixedLengthRecordDataset", flat_inputs, None, results, name)
  return results[0]
1200 
1201 
def flat_map_dataset(input_dataset, other_arguments, f, output_types, output_shapes, name=None):
  r"""Creates a dataset that applies `f` to the outputs of `input_dataset`.

  Unlike MapDataset, the `f` in FlatMapDataset is expected to return a
  Dataset variant, and FlatMapDataset will flatten successive results
  into a single Dataset.

  Args:
    input_dataset: A `Tensor` of type `variant`.
    other_arguments: A list of `Tensor` objects.
    f: A function decorated with @Defun.
      A function mapping elements of `input_dataset`, concatenated with
      `other_arguments`, to a Dataset variant that contains elements matching
      `output_types` and `output_shapes`.
    output_types: A list of `tf.DTypes` that has length `>= 1`.
    output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `variant`.
  """
  _ctx = _context._context
  if _ctx is None or not _ctx._eager_context.is_eager:
    # Graph mode: validate the list-valued attrs up front, then
    # canonicalize them before building the op.
    if not isinstance(output_types, (list, tuple)):
      raise TypeError(
          "Expected list for 'output_types' argument to "
          "'flat_map_dataset' Op, not %r." % output_types)
    output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
    if not isinstance(output_shapes, (list, tuple)):
      raise TypeError(
          "Expected list for 'output_shapes' argument to "
          "'flat_map_dataset' Op, not %r." % output_shapes)
    output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
    # Register the op in the current graph; `Targuments` is inferred from
    # `other_arguments` by the op-def library.
    _, _, _op = _op_def_lib._apply_op_helper(
        "FlatMapDataset", input_dataset=input_dataset,
        other_arguments=other_arguments, f=f, output_types=output_types,
        output_shapes=output_shapes, name=name)
    _result = _op.outputs[:]
    _inputs_flat = _op.inputs
    # Read attrs back from the op so the gradient record sees canonical values.
    _attrs = ("f", _op.get_attr("f"), "Targuments",
              _op.get_attr("Targuments"), "output_types",
              _op.get_attr("output_types"), "output_shapes",
              _op.get_attr("output_shapes"))
    _execute.record_gradient(
      "FlatMapDataset", _inputs_flat, _attrs, _result, name)
    # Exactly one output: the dataset variant tensor.
    _result, = _result
    return _result

  else:
    try:
      # Eager mode: try the C-level fast path first.
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._eager_context.device_name,
        "FlatMapDataset", name, _ctx._post_execution_callbacks, input_dataset,
        other_arguments, "f", f, "output_types", output_types,
        "output_shapes", output_shapes)
      return _result
    except _core._FallbackException:
      # Inputs the fast path cannot handle; use the Python slow path.
      return flat_map_dataset_eager_fallback(
          input_dataset, other_arguments, f=f, output_types=output_types,
          output_shapes=output_shapes, name=name, ctx=_ctx)
    except _core._NotOkStatusException as e:
      # Translate the C++ status into the matching Python exception.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
1268 
1269 
def flat_map_dataset_eager_fallback(input_dataset, other_arguments, f, output_types, output_shapes, name=None, ctx=None):
  r"""This is the slowpath function for Eager mode.
  This is for function flat_map_dataset
  """
  _ctx = ctx if ctx else _context.context()
  # Validate then canonicalize the list-valued attrs.
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'flat_map_dataset' Op, not %r." % output_types)
  output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'flat_map_dataset' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  # Captured arguments may have mixed dtypes; infer `Targuments` from them.
  _attr_Targuments, other_arguments = _execute.convert_to_mixed_eager_tensors(other_arguments, _ctx)
  input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
  _inputs_flat = [input_dataset] + list(other_arguments)
  _attrs = ("f", f, "Targuments", _attr_Targuments, "output_types",
  output_types, "output_shapes", output_shapes)
  # Exactly one output: the flattened dataset variant.
  _result = _execute.execute(b"FlatMapDataset", 1, inputs=_inputs_flat,
                             attrs=_attrs, ctx=_ctx, name=name)
  _execute.record_gradient(
      "FlatMapDataset", _inputs_flat, _attrs, _result, name)
  _result, = _result
  return _result
1296 
1297 
def generator_dataset(init_func_other_args, next_func_other_args, finalize_func_other_args, init_func, next_func, finalize_func, output_types, output_shapes, name=None):
  r"""Creates a dataset that invokes a function to generate elements.

  Args:
    init_func_other_args: A list of `Tensor` objects.
    next_func_other_args: A list of `Tensor` objects.
    finalize_func_other_args: A list of `Tensor` objects.
    init_func: A function decorated with @Defun.
    next_func: A function decorated with @Defun.
    finalize_func: A function decorated with @Defun.
    output_types: A list of `tf.DTypes` that has length `>= 1`.
    output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `variant`.
  """
  # Thread-local context; None until TensorFlow has initialized one.
  _ctx = _context._context
  if _ctx is None or not _ctx._eager_context.is_eager:
    # Graph mode: validate/canonicalize the list attrs in Python, then
    # add a "GeneratorDataset" node to the current graph.
    if not isinstance(output_types, (list, tuple)):
      raise TypeError(
          "Expected list for 'output_types' argument to "
          "'generator_dataset' Op, not %r." % output_types)
    output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
    if not isinstance(output_shapes, (list, tuple)):
      raise TypeError(
          "Expected list for 'output_shapes' argument to "
          "'generator_dataset' Op, not %r." % output_shapes)
    output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
    _, _, _op = _op_def_lib._apply_op_helper(
        "GeneratorDataset", init_func_other_args=init_func_other_args,
        next_func_other_args=next_func_other_args,
        finalize_func_other_args=finalize_func_other_args,
        init_func=init_func, next_func=next_func, finalize_func=finalize_func,
        output_types=output_types, output_shapes=output_shapes, name=name)
    _result = _op.outputs[:]
    _inputs_flat = _op.inputs
    # Read the attrs back off the created op so record_gradient sees the
    # canonicalized values (including the inferred T*_args dtype lists).
    _attrs = ("init_func", _op.get_attr("init_func"), "next_func",
              _op.get_attr("next_func"), "finalize_func",
              _op.get_attr("finalize_func"), "Tinit_func_args",
              _op.get_attr("Tinit_func_args"), "Tnext_func_args",
              _op.get_attr("Tnext_func_args"), "Tfinalize_func_args",
              _op.get_attr("Tfinalize_func_args"), "output_types",
              _op.get_attr("output_types"), "output_shapes",
              _op.get_attr("output_shapes"))
    _execute.record_gradient(
      "GeneratorDataset", _inputs_flat, _attrs, _result, name)
    # The op has exactly one output (the dataset variant handle).
    _result, = _result
    return _result

  else:
    try:
      # Eager fast path: dispatch straight through the C extension.
      # Argument order here must match the C++ op registration exactly.
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._eager_context.device_name,
        "GeneratorDataset", name, _ctx._post_execution_callbacks,
        init_func_other_args, next_func_other_args, finalize_func_other_args,
        "init_func", init_func, "next_func", next_func, "finalize_func",
        finalize_func, "output_types", output_types, "output_shapes",
        output_shapes)
      return _result
    except _core._FallbackException:
      # The fast path could not handle these inputs; retry through the
      # generic (slower) eager execution path.
      return generator_dataset_eager_fallback(
          init_func_other_args, next_func_other_args,
          finalize_func_other_args, init_func=init_func, next_func=next_func,
          finalize_func=finalize_func, output_types=output_types,
          output_shapes=output_shapes, name=name, ctx=_ctx)
    except _core._NotOkStatusException as e:
      # Surface the C++ status as the matching Python exception, tagging
      # the message with the op name when one was given.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
1370 
1371 
def generator_dataset_eager_fallback(init_func_other_args, next_func_other_args, finalize_func_other_args, init_func, next_func, finalize_func, output_types, output_shapes, name=None, ctx=None):
  r"""Slow-path eager executor for the GeneratorDataset op.

  Canonicalizes the attrs and captured-argument tensors, then dispatches
  through the generic eager execute machinery instead of the C fast path.
  """
  # Fall back to the thread-local eager context when none is supplied.
  eager_ctx = _context.context() if not ctx else ctx
  if isinstance(output_types, (list, tuple)):
    output_types = [_execute.make_type(_t, "output_types")
                    for _t in output_types]
  else:
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'generator_dataset' Op, not %r." % output_types)
  if isinstance(output_shapes, (list, tuple)):
    output_shapes = [_execute.make_shape(_s, "output_shapes")
                     for _s in output_shapes]
  else:
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'generator_dataset' Op, not %r." % output_shapes)
  # Each closure's captured tensors may have mixed dtypes; convert them
  # and keep the per-closure dtype lists for the T*_args attrs.
  init_arg_types, init_args = _execute.convert_to_mixed_eager_tensors(
      init_func_other_args, eager_ctx)
  next_arg_types, next_args = _execute.convert_to_mixed_eager_tensors(
      next_func_other_args, eager_ctx)
  finalize_arg_types, finalize_args = _execute.convert_to_mixed_eager_tensors(
      finalize_func_other_args, eager_ctx)
  flat_inputs = list(init_args) + list(next_args) + list(finalize_args)
  op_attrs = ("init_func", init_func, "next_func", next_func,
              "finalize_func", finalize_func,
              "Tinit_func_args", init_arg_types,
              "Tnext_func_args", next_arg_types,
              "Tfinalize_func_args", finalize_arg_types,
              "output_types", output_types, "output_shapes", output_shapes)
  outputs = _execute.execute(b"GeneratorDataset", 1, inputs=flat_inputs,
                             attrs=op_attrs, ctx=eager_ctx, name=name)
  _execute.record_gradient(
      "GeneratorDataset", flat_inputs, op_attrs, outputs, name)
  # Exactly one output: the dataset variant handle.
  dataset_handle, = outputs
  return dataset_handle
1401 
1402 
def group_by_reducer_dataset(input_dataset, key_func_other_arguments, init_func_other_arguments, reduce_func_other_arguments, finalize_func_other_arguments, key_func, init_func, reduce_func, finalize_func, output_types, output_shapes, name=None):
  r"""Creates a dataset that computes a group-by on `input_dataset`.

  Creates a dataset that computes a group-by on `input_dataset`.

  Args:
    input_dataset: A `Tensor` of type `variant`.
      A variant tensor representing the input dataset.
    key_func_other_arguments: A list of `Tensor` objects.
      A list of tensors, typically values that were captured when
      building a closure for `key_func`.
    init_func_other_arguments: A list of `Tensor` objects.
      A list of tensors, typically values that were captured when
      building a closure for `init_func`.
    reduce_func_other_arguments: A list of `Tensor` objects.
      A list of tensors, typically values that were captured when
      building a closure for `reduce_func`.
    finalize_func_other_arguments: A list of `Tensor` objects.
      A list of tensors, typically values that were captured when
      building a closure for `finalize_func`.
    key_func: A function decorated with @Defun.
      A function mapping an element of `input_dataset`, concatenated
      with `key_func_other_arguments` to a scalar value of type DT_INT64.
    init_func: A function decorated with @Defun.
      A function mapping a key of type DT_INT64, concatenated with
      `init_func_other_arguments` to the initial reducer state.
    reduce_func: A function decorated with @Defun.
      A function mapping the current reducer state and an element of `input_dataset`,
      concatenated with `reduce_func_other_arguments` to a new reducer state.
    finalize_func: A function decorated with @Defun.
      A function mapping the final reducer state to an output element.
    output_types: A list of `tf.DTypes` that has length `>= 1`.
    output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `variant`.
  """
  # Thread-local context; None until TensorFlow has initialized one.
  _ctx = _context._context
  if _ctx is None or not _ctx._eager_context.is_eager:
    # Graph mode: validate/canonicalize the list attrs in Python, then
    # add a "GroupByReducerDataset" node to the current graph.
    if not isinstance(output_types, (list, tuple)):
      raise TypeError(
          "Expected list for 'output_types' argument to "
          "'group_by_reducer_dataset' Op, not %r." % output_types)
    output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
    if not isinstance(output_shapes, (list, tuple)):
      raise TypeError(
          "Expected list for 'output_shapes' argument to "
          "'group_by_reducer_dataset' Op, not %r." % output_shapes)
    output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
    _, _, _op = _op_def_lib._apply_op_helper(
        "GroupByReducerDataset", input_dataset=input_dataset,
        key_func_other_arguments=key_func_other_arguments,
        init_func_other_arguments=init_func_other_arguments,
        reduce_func_other_arguments=reduce_func_other_arguments,
        finalize_func_other_arguments=finalize_func_other_arguments,
        key_func=key_func, init_func=init_func, reduce_func=reduce_func,
        finalize_func=finalize_func, output_types=output_types,
        output_shapes=output_shapes, name=name)
    _result = _op.outputs[:]
    _inputs_flat = _op.inputs
    # Read the attrs back off the created op so record_gradient sees the
    # canonicalized values (including the inferred T* dtype lists).
    _attrs = ("key_func", _op.get_attr("key_func"), "init_func",
              _op.get_attr("init_func"), "reduce_func",
              _op.get_attr("reduce_func"), "finalize_func",
              _op.get_attr("finalize_func"), "Tkey_func_other_arguments",
              _op.get_attr("Tkey_func_other_arguments"),
              "Tinit_func_other_arguments",
              _op.get_attr("Tinit_func_other_arguments"),
              "Treduce_func_other_arguments",
              _op.get_attr("Treduce_func_other_arguments"),
              "Tfinalize_func_other_arguments",
              _op.get_attr("Tfinalize_func_other_arguments"), "output_types",
              _op.get_attr("output_types"), "output_shapes",
              _op.get_attr("output_shapes"))
    _execute.record_gradient(
      "GroupByReducerDataset", _inputs_flat, _attrs, _result, name)
    # The op has exactly one output (the dataset variant handle).
    _result, = _result
    return _result

  else:
    try:
      # Eager fast path: dispatch straight through the C extension.
      # Argument order here must match the C++ op registration exactly.
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._eager_context.device_name,
        "GroupByReducerDataset", name, _ctx._post_execution_callbacks,
        input_dataset, key_func_other_arguments, init_func_other_arguments,
        reduce_func_other_arguments, finalize_func_other_arguments,
        "key_func", key_func, "init_func", init_func, "reduce_func",
        reduce_func, "finalize_func", finalize_func, "output_types",
        output_types, "output_shapes", output_shapes)
      return _result
    except _core._FallbackException:
      # The fast path could not handle these inputs; retry through the
      # generic (slower) eager execution path.
      return group_by_reducer_dataset_eager_fallback(
          input_dataset, key_func_other_arguments, init_func_other_arguments,
          reduce_func_other_arguments, finalize_func_other_arguments,
          key_func=key_func, init_func=init_func, reduce_func=reduce_func,
          finalize_func=finalize_func, output_types=output_types,
          output_shapes=output_shapes, name=name, ctx=_ctx)
    except _core._NotOkStatusException as e:
      # Surface the C++ status as the matching Python exception, tagging
      # the message with the op name when one was given.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
1506 
1507 
def group_by_reducer_dataset_eager_fallback(input_dataset, key_func_other_arguments, init_func_other_arguments, reduce_func_other_arguments, finalize_func_other_arguments, key_func, init_func, reduce_func, finalize_func, output_types, output_shapes, name=None, ctx=None):
  r"""Slow-path eager executor for the GroupByReducerDataset op.

  Canonicalizes the attrs and captured-argument tensors, then dispatches
  through the generic eager execute machinery instead of the C fast path.
  """
  # Fall back to the thread-local eager context when none is supplied.
  eager_ctx = _context.context() if not ctx else ctx
  if isinstance(output_types, (list, tuple)):
    output_types = [_execute.make_type(_t, "output_types")
                    for _t in output_types]
  else:
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'group_by_reducer_dataset' Op, not %r." % output_types)
  if isinstance(output_shapes, (list, tuple)):
    output_shapes = [_execute.make_shape(_s, "output_shapes")
                     for _s in output_shapes]
  else:
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'group_by_reducer_dataset' Op, not %r." % output_shapes)
  # Each closure's captured tensors may have mixed dtypes; convert them
  # and keep the per-closure dtype lists for the T* attrs.
  key_types, key_args = _execute.convert_to_mixed_eager_tensors(
      key_func_other_arguments, eager_ctx)
  init_types, init_args = _execute.convert_to_mixed_eager_tensors(
      init_func_other_arguments, eager_ctx)
  reduce_types, reduce_args = _execute.convert_to_mixed_eager_tensors(
      reduce_func_other_arguments, eager_ctx)
  finalize_types, finalize_args = _execute.convert_to_mixed_eager_tensors(
      finalize_func_other_arguments, eager_ctx)
  input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
  flat_inputs = ([input_dataset] + list(key_args) + list(init_args)
                 + list(reduce_args) + list(finalize_args))
  op_attrs = ("key_func", key_func, "init_func", init_func,
              "reduce_func", reduce_func, "finalize_func", finalize_func,
              "Tkey_func_other_arguments", key_types,
              "Tinit_func_other_arguments", init_types,
              "Treduce_func_other_arguments", reduce_types,
              "Tfinalize_func_other_arguments", finalize_types,
              "output_types", output_types, "output_shapes", output_shapes)
  outputs = _execute.execute(b"GroupByReducerDataset", 1, inputs=flat_inputs,
                             attrs=op_attrs, ctx=eager_ctx, name=name)
  _execute.record_gradient(
      "GroupByReducerDataset", flat_inputs, op_attrs, outputs, name)
  # Exactly one output: the dataset variant handle.
  dataset_handle, = outputs
  return dataset_handle
1542 
1543 
def group_by_window_dataset(input_dataset, key_func_other_arguments, reduce_func_other_arguments, window_size_func_other_arguments, key_func, reduce_func, window_size_func, output_types, output_shapes, name=None):
  r"""Creates a dataset that computes a windowed group-by on `input_dataset`.

  // TODO(mrry): Support non-int64 keys.

  Args:
    input_dataset: A `Tensor` of type `variant`.
    key_func_other_arguments: A list of `Tensor` objects.
    reduce_func_other_arguments: A list of `Tensor` objects.
    window_size_func_other_arguments: A list of `Tensor` objects.
    key_func: A function decorated with @Defun.
      A function mapping an element of `input_dataset`, concatenated
      with `key_func_other_arguments` to a scalar value of type DT_INT64.
    reduce_func: A function decorated with @Defun.
    window_size_func: A function decorated with @Defun.
    output_types: A list of `tf.DTypes` that has length `>= 1`.
    output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `variant`.
  """
  # Thread-local context; None until TensorFlow has initialized one.
  _ctx = _context._context
  if _ctx is None or not _ctx._eager_context.is_eager:
    # Graph mode: validate/canonicalize the list attrs in Python, then
    # add a "GroupByWindowDataset" node to the current graph.
    if not isinstance(output_types, (list, tuple)):
      raise TypeError(
          "Expected list for 'output_types' argument to "
          "'group_by_window_dataset' Op, not %r." % output_types)
    output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
    if not isinstance(output_shapes, (list, tuple)):
      raise TypeError(
          "Expected list for 'output_shapes' argument to "
          "'group_by_window_dataset' Op, not %r." % output_shapes)
    output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
    _, _, _op = _op_def_lib._apply_op_helper(
        "GroupByWindowDataset", input_dataset=input_dataset,
        key_func_other_arguments=key_func_other_arguments,
        reduce_func_other_arguments=reduce_func_other_arguments,
        window_size_func_other_arguments=window_size_func_other_arguments,
        key_func=key_func, reduce_func=reduce_func,
        window_size_func=window_size_func, output_types=output_types,
        output_shapes=output_shapes, name=name)
    _result = _op.outputs[:]
    _inputs_flat = _op.inputs
    # Read the attrs back off the created op so record_gradient sees the
    # canonicalized values (including the inferred T* dtype lists).
    _attrs = ("key_func", _op.get_attr("key_func"), "reduce_func",
              _op.get_attr("reduce_func"), "window_size_func",
              _op.get_attr("window_size_func"), "Tkey_func_other_arguments",
              _op.get_attr("Tkey_func_other_arguments"),
              "Treduce_func_other_arguments",
              _op.get_attr("Treduce_func_other_arguments"),
              "Twindow_size_func_other_arguments",
              _op.get_attr("Twindow_size_func_other_arguments"),
              "output_types", _op.get_attr("output_types"), "output_shapes",
              _op.get_attr("output_shapes"))
    _execute.record_gradient(
      "GroupByWindowDataset", _inputs_flat, _attrs, _result, name)
    # The op has exactly one output (the dataset variant handle).
    _result, = _result
    return _result

  else:
    try:
      # Eager fast path: dispatch straight through the C extension.
      # Argument order here must match the C++ op registration exactly.
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._eager_context.device_name,
        "GroupByWindowDataset", name, _ctx._post_execution_callbacks,
        input_dataset, key_func_other_arguments, reduce_func_other_arguments,
        window_size_func_other_arguments, "key_func", key_func, "reduce_func",
        reduce_func, "window_size_func", window_size_func, "output_types",
        output_types, "output_shapes", output_shapes)
      return _result
    except _core._FallbackException:
      # The fast path could not handle these inputs; retry through the
      # generic (slower) eager execution path.
      return group_by_window_dataset_eager_fallback(
          input_dataset, key_func_other_arguments,
          reduce_func_other_arguments, window_size_func_other_arguments,
          key_func=key_func, reduce_func=reduce_func,
          window_size_func=window_size_func, output_types=output_types,
          output_shapes=output_shapes, name=name, ctx=_ctx)
    except _core._NotOkStatusException as e:
      # Surface the C++ status as the matching Python exception, tagging
      # the message with the op name when one was given.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
1626 
1627 
def group_by_window_dataset_eager_fallback(input_dataset, key_func_other_arguments, reduce_func_other_arguments, window_size_func_other_arguments, key_func, reduce_func, window_size_func, output_types, output_shapes, name=None, ctx=None):
  r"""Slow-path eager executor for the GroupByWindowDataset op.

  Canonicalizes the attrs and captured-argument tensors, then dispatches
  through the generic eager execute machinery instead of the C fast path.
  """
  # Fall back to the thread-local eager context when none is supplied.
  eager_ctx = _context.context() if not ctx else ctx
  if isinstance(output_types, (list, tuple)):
    output_types = [_execute.make_type(_t, "output_types")
                    for _t in output_types]
  else:
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'group_by_window_dataset' Op, not %r." % output_types)
  if isinstance(output_shapes, (list, tuple)):
    output_shapes = [_execute.make_shape(_s, "output_shapes")
                     for _s in output_shapes]
  else:
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'group_by_window_dataset' Op, not %r." % output_shapes)
  # Each closure's captured tensors may have mixed dtypes; convert them
  # and keep the per-closure dtype lists for the T* attrs.
  key_types, key_args = _execute.convert_to_mixed_eager_tensors(
      key_func_other_arguments, eager_ctx)
  reduce_types, reduce_args = _execute.convert_to_mixed_eager_tensors(
      reduce_func_other_arguments, eager_ctx)
  window_types, window_args = _execute.convert_to_mixed_eager_tensors(
      window_size_func_other_arguments, eager_ctx)
  input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
  flat_inputs = ([input_dataset] + list(key_args) + list(reduce_args)
                 + list(window_args))
  op_attrs = ("key_func", key_func, "reduce_func", reduce_func,
              "window_size_func", window_size_func,
              "Tkey_func_other_arguments", key_types,
              "Treduce_func_other_arguments", reduce_types,
              "Twindow_size_func_other_arguments", window_types,
              "output_types", output_types, "output_shapes", output_shapes)
  outputs = _execute.execute(b"GroupByWindowDataset", 1, inputs=flat_inputs,
                             attrs=op_attrs, ctx=eager_ctx, name=name)
  _execute.record_gradient(
      "GroupByWindowDataset", flat_inputs, op_attrs, outputs, name)
  # Exactly one output: the dataset variant handle.
  dataset_handle, = outputs
  return dataset_handle
1660 
1661 
def interleave_dataset(input_dataset, other_arguments, cycle_length, block_length, f, output_types, output_shapes, name=None):
  r"""Creates a dataset that applies `f` to the outputs of `input_dataset`.

  Unlike MapDataset, the `f` in InterleaveDataset is expected to return
  a Dataset variant, and InterleaveDataset will flatten successive
  results into a single Dataset. Unlike FlatMapDataset,
  InterleaveDataset will interleave sequences of up to `block_length`
  consecutive elements from `cycle_length` input elements.

  Args:
    input_dataset: A `Tensor` of type `variant`.
    other_arguments: A list of `Tensor` objects.
    cycle_length: A `Tensor` of type `int64`.
    block_length: A `Tensor` of type `int64`.
    f: A function decorated with @Defun.
      A function mapping elements of `input_dataset`, concatenated with
      `other_arguments`, to a Dataset variant that contains elements matching
      `output_types` and `output_shapes`.
    output_types: A list of `tf.DTypes` that has length `>= 1`.
    output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `variant`.
  """
  # Thread-local context; None until TensorFlow has initialized one.
  _ctx = _context._context
  if _ctx is None or not _ctx._eager_context.is_eager:
    # Graph mode: validate/canonicalize the list attrs in Python, then
    # add an "InterleaveDataset" node to the current graph.
    if not isinstance(output_types, (list, tuple)):
      raise TypeError(
          "Expected list for 'output_types' argument to "
          "'interleave_dataset' Op, not %r." % output_types)
    output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
    if not isinstance(output_shapes, (list, tuple)):
      raise TypeError(
          "Expected list for 'output_shapes' argument to "
          "'interleave_dataset' Op, not %r." % output_shapes)
    output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
    _, _, _op = _op_def_lib._apply_op_helper(
        "InterleaveDataset", input_dataset=input_dataset,
        other_arguments=other_arguments, cycle_length=cycle_length,
        block_length=block_length, f=f, output_types=output_types,
        output_shapes=output_shapes, name=name)
    _result = _op.outputs[:]
    _inputs_flat = _op.inputs
    # Read the attrs back off the created op so record_gradient sees the
    # canonicalized values (including the inferred Targuments dtypes).
    _attrs = ("f", _op.get_attr("f"), "Targuments",
              _op.get_attr("Targuments"), "output_types",
              _op.get_attr("output_types"), "output_shapes",
              _op.get_attr("output_shapes"))
    _execute.record_gradient(
      "InterleaveDataset", _inputs_flat, _attrs, _result, name)
    # The op has exactly one output (the dataset variant handle).
    _result, = _result
    return _result

  else:
    try:
      # Eager fast path: dispatch straight through the C extension.
      # Argument order here must match the C++ op registration exactly.
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._eager_context.device_name,
        "InterleaveDataset", name, _ctx._post_execution_callbacks,
        input_dataset, other_arguments, cycle_length, block_length, "f", f,
        "output_types", output_types, "output_shapes", output_shapes)
      return _result
    except _core._FallbackException:
      # The fast path could not handle these inputs; retry through the
      # generic (slower) eager execution path.
      return interleave_dataset_eager_fallback(
          input_dataset, other_arguments, cycle_length, block_length, f=f,
          output_types=output_types, output_shapes=output_shapes, name=name,
          ctx=_ctx)
    except _core._NotOkStatusException as e:
      # Surface the C++ status as the matching Python exception, tagging
      # the message with the op name when one was given.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
1734 
1735 
def interleave_dataset_eager_fallback(input_dataset, other_arguments, cycle_length, block_length, f, output_types, output_shapes, name=None, ctx=None):
  r"""Slow-path eager executor for the InterleaveDataset op.

  Canonicalizes the attrs and input tensors, then dispatches through the
  generic eager execute machinery instead of the C fast path.
  """
  # Fall back to the thread-local eager context when none is supplied.
  eager_ctx = _context.context() if not ctx else ctx
  if isinstance(output_types, (list, tuple)):
    output_types = [_execute.make_type(_t, "output_types")
                    for _t in output_types]
  else:
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'interleave_dataset' Op, not %r." % output_types)
  if isinstance(output_shapes, (list, tuple)):
    output_shapes = [_execute.make_shape(_s, "output_shapes")
                     for _s in output_shapes]
  else:
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'interleave_dataset' Op, not %r." % output_shapes)
  # Captured closure tensors may have mixed dtypes; convert them and
  # keep the resulting dtype list for the Targuments attr.
  arg_types, captured_args = _execute.convert_to_mixed_eager_tensors(
      other_arguments, eager_ctx)
  input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
  cycle_length = _ops.convert_to_tensor(cycle_length, _dtypes.int64)
  block_length = _ops.convert_to_tensor(block_length, _dtypes.int64)
  flat_inputs = ([input_dataset] + list(captured_args)
                 + [cycle_length, block_length])
  op_attrs = ("f", f, "Targuments", arg_types,
              "output_types", output_types, "output_shapes", output_shapes)
  outputs = _execute.execute(b"InterleaveDataset", 1, inputs=flat_inputs,
                             attrs=op_attrs, ctx=eager_ctx, name=name)
  _execute.record_gradient(
      "InterleaveDataset", flat_inputs, op_attrs, outputs, name)
  # Exactly one output: the dataset variant handle.
  dataset_handle, = outputs
  return dataset_handle
1764 
1765 
def iterator(shared_name, container, output_types, output_shapes, name=None):
  r"""A container for an iterator resource.

  Args:
    shared_name: A `string`.
    container: A `string`.
    output_types: A list of `tf.DTypes` that has length `>= 1`.
    output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `resource`.
  """
  # Thread-local context; None until TensorFlow has initialized one.
  _ctx = _context._context
  if _ctx is None or not _ctx._eager_context.is_eager:
    # Graph mode: validate/canonicalize the attrs in Python, then add an
    # "Iterator" node to the current graph.
    shared_name = _execute.make_str(shared_name, "shared_name")
    container = _execute.make_str(container, "container")
    if not isinstance(output_types, (list, tuple)):
      raise TypeError(
          "Expected list for 'output_types' argument to "
          "'iterator' Op, not %r." % output_types)
    output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
    if not isinstance(output_shapes, (list, tuple)):
      raise TypeError(
          "Expected list for 'output_shapes' argument to "
          "'iterator' Op, not %r." % output_shapes)
    output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
    _, _, _op = _op_def_lib._apply_op_helper(
        "Iterator", shared_name=shared_name, container=container,
        output_types=output_types, output_shapes=output_shapes, name=name)
    _result = _op.outputs[:]
    _inputs_flat = _op.inputs
    # Read the attrs back off the created op so record_gradient sees the
    # canonicalized values.
    _attrs = ("shared_name", _op.get_attr("shared_name"), "container",
              _op.get_attr("container"), "output_types",
              _op.get_attr("output_types"), "output_shapes",
              _op.get_attr("output_shapes"))
    _execute.record_gradient(
      "Iterator", _inputs_flat, _attrs, _result, name)
    # The op has exactly one output (the iterator resource handle).
    _result, = _result
    return _result

  else:
    try:
      # Eager fast path: dispatch straight through the C extension.
      # Argument order here must match the C++ op registration exactly.
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._eager_context.device_name, "Iterator",
        name, _ctx._post_execution_callbacks, "shared_name", shared_name,
        "container", container, "output_types", output_types, "output_shapes",
        output_shapes)
      return _result
    except _core._FallbackException:
      # The fast path could not handle these inputs; retry through the
      # generic (slower) eager execution path.
      return iterator_eager_fallback(
          shared_name=shared_name, container=container,
          output_types=output_types, output_shapes=output_shapes, name=name,
          ctx=_ctx)
    except _core._NotOkStatusException as e:
      # Surface the C++ status as the matching Python exception, tagging
      # the message with the op name when one was given.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
1826 
1827 
def iterator_eager_fallback(shared_name, container, output_types, output_shapes, name=None, ctx=None):
  r"""Slow-path eager executor for the Iterator op.

  Canonicalizes the attrs, then dispatches through the generic eager
  execute machinery instead of the C fast path.
  """
  # Fall back to the thread-local eager context when none is supplied.
  eager_ctx = _context.context() if not ctx else ctx
  shared_name = _execute.make_str(shared_name, "shared_name")
  container = _execute.make_str(container, "container")
  if isinstance(output_types, (list, tuple)):
    output_types = [_execute.make_type(_t, "output_types")
                    for _t in output_types]
  else:
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'iterator' Op, not %r." % output_types)
  if isinstance(output_shapes, (list, tuple)):
    output_shapes = [_execute.make_shape(_s, "output_shapes")
                     for _s in output_shapes]
  else:
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'iterator' Op, not %r." % output_shapes)
  # The Iterator op takes no input tensors; everything is attrs.
  flat_inputs = []
  op_attrs = ("shared_name", shared_name, "container", container,
              "output_types", output_types, "output_shapes", output_shapes)
  outputs = _execute.execute(b"Iterator", 1, inputs=flat_inputs,
                             attrs=op_attrs, ctx=eager_ctx, name=name)
  _execute.record_gradient(
      "Iterator", flat_inputs, op_attrs, outputs, name)
  # Exactly one output: the iterator resource handle.
  resource_handle, = outputs
  return resource_handle
1854 
1855 
def iterator_from_string_handle(string_handle, output_types=[], output_shapes=[], name=None):
  r"""Converts the given string representing a handle to an iterator to a resource.

  Args:
    string_handle: A `Tensor` of type `string`.
      A string representation of the given handle.
    output_types: An optional list of `tf.DTypes`. Defaults to `[]`.
      If specified, defines the type of each tuple component in an
      element produced by the resulting iterator.
    output_shapes: An optional list of shapes (each a `tf.TensorShape` or list of `ints`). Defaults to `[]`.
      If specified, defines the shape of each tuple component in an
      element produced by the resulting iterator.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `resource`.
  """
  ctx = _context._context
  if ctx is not None and ctx._eager_context.is_eager:
    # Eager mode: try the fast C execution path first; fall back to the
    # Python slow path only if the fast path rejects the inputs.
    try:
      return _pywrap_tensorflow.TFE_Py_FastPathExecute(
          ctx._context_handle, ctx._eager_context.device_name,
          "IteratorFromStringHandle", name, ctx._post_execution_callbacks,
          string_handle, "output_types", output_types, "output_shapes",
          output_shapes)
    except _core._FallbackException:
      return iterator_from_string_handle_eager_fallback(
          string_handle, output_types=output_types,
          output_shapes=output_shapes, name=name, ctx=ctx)
    except _core._NotOkStatusException as e:
      message = e.message if name is None else e.message + " name: " + name
      _six.raise_from(_core._status_to_exception(e.code, message), None)

  # Graph mode: canonicalize the attr values, then add a node to the graph.
  output_types = [] if output_types is None else output_types
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'iterator_from_string_handle' Op, not %r." % output_types)
  output_types = [_execute.make_type(dtype, "output_types") for dtype in output_types]
  output_shapes = [] if output_shapes is None else output_shapes
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'iterator_from_string_handle' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(shape, "output_shapes") for shape in output_shapes]
  _, _, op = _op_def_lib._apply_op_helper(
      "IteratorFromStringHandle", string_handle=string_handle,
      output_types=output_types, output_shapes=output_shapes, name=name)
  outputs = op.outputs[:]
  flat_inputs = op.inputs
  op_attrs = ("output_types", op.get_attr("output_types"), "output_shapes",
              op.get_attr("output_shapes"))
  _execute.record_gradient(
      "IteratorFromStringHandle", flat_inputs, op_attrs, outputs, name)
  handle, = outputs
  return handle
1919 
1920 
def iterator_from_string_handle_eager_fallback(string_handle, output_types=[], output_shapes=[], name=None, ctx=None):
  r"""This is the slowpath function for Eager mode.
  This is for function iterator_from_string_handle
  """
  exec_ctx = ctx if ctx else _context.context()
  # Canonicalize the attr values exactly as the graph-mode path does.
  output_types = output_types if output_types is not None else []
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'iterator_from_string_handle' Op, not %r." % output_types)
  output_types = [_execute.make_type(dtype, "output_types") for dtype in output_types]
  output_shapes = output_shapes if output_shapes is not None else []
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'iterator_from_string_handle' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(shape, "output_shapes") for shape in output_shapes]
  string_handle = _ops.convert_to_tensor(string_handle, _dtypes.string)
  flat_inputs = [string_handle]
  op_attrs = ("output_types", output_types, "output_shapes", output_shapes)
  outputs = _execute.execute(b"IteratorFromStringHandle", 1,
                             inputs=flat_inputs, attrs=op_attrs,
                             ctx=exec_ctx, name=name)
  _execute.record_gradient(
      "IteratorFromStringHandle", flat_inputs, op_attrs, outputs, name)
  result, = outputs
  return result
1950 
1951 
def iterator_from_string_handle_v2(string_handle, output_types=[], output_shapes=[], name=None):
  r"""V2 variant of `iterator_from_string_handle`.

  Args:
    string_handle: A `Tensor` of type `string`.
    output_types: An optional list of `tf.DTypes`. Defaults to `[]`.
    output_shapes: An optional list of shapes (each a `tf.TensorShape` or list of `ints`). Defaults to `[]`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `resource`.
  """
  ctx = _context._context
  if ctx is not None and ctx._eager_context.is_eager:
    # Eager mode: fast C path first, Python slow path on fallback.
    try:
      return _pywrap_tensorflow.TFE_Py_FastPathExecute(
          ctx._context_handle, ctx._eager_context.device_name,
          "IteratorFromStringHandleV2", name, ctx._post_execution_callbacks,
          string_handle, "output_types", output_types, "output_shapes",
          output_shapes)
    except _core._FallbackException:
      return iterator_from_string_handle_v2_eager_fallback(
          string_handle, output_types=output_types,
          output_shapes=output_shapes, name=name, ctx=ctx)
    except _core._NotOkStatusException as e:
      message = e.message if name is None else e.message + " name: " + name
      _six.raise_from(_core._status_to_exception(e.code, message), None)

  # Graph mode: canonicalize the attr values, then add a node to the graph.
  output_types = [] if output_types is None else output_types
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'iterator_from_string_handle_v2' Op, not %r." % output_types)
  output_types = [_execute.make_type(dtype, "output_types") for dtype in output_types]
  output_shapes = [] if output_shapes is None else output_shapes
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'iterator_from_string_handle_v2' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(shape, "output_shapes") for shape in output_shapes]
  _, _, op = _op_def_lib._apply_op_helper(
      "IteratorFromStringHandleV2", string_handle=string_handle,
      output_types=output_types, output_shapes=output_shapes, name=name)
  outputs = op.outputs[:]
  flat_inputs = op.inputs
  op_attrs = ("output_types", op.get_attr("output_types"), "output_shapes",
              op.get_attr("output_shapes"))
  _execute.record_gradient(
      "IteratorFromStringHandleV2", flat_inputs, op_attrs, outputs, name)
  handle, = outputs
  return handle
2010 
2011 
def iterator_from_string_handle_v2_eager_fallback(string_handle, output_types=[], output_shapes=[], name=None, ctx=None):
  r"""This is the slowpath function for Eager mode.
  This is for function iterator_from_string_handle_v2
  """
  exec_ctx = ctx if ctx else _context.context()
  # Canonicalize the attr values exactly as the graph-mode path does.
  output_types = output_types if output_types is not None else []
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'iterator_from_string_handle_v2' Op, not %r." % output_types)
  output_types = [_execute.make_type(dtype, "output_types") for dtype in output_types]
  output_shapes = output_shapes if output_shapes is not None else []
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'iterator_from_string_handle_v2' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(shape, "output_shapes") for shape in output_shapes]
  string_handle = _ops.convert_to_tensor(string_handle, _dtypes.string)
  flat_inputs = [string_handle]
  op_attrs = ("output_types", output_types, "output_shapes", output_shapes)
  outputs = _execute.execute(b"IteratorFromStringHandleV2", 1,
                             inputs=flat_inputs, attrs=op_attrs,
                             ctx=exec_ctx, name=name)
  _execute.record_gradient(
      "IteratorFromStringHandleV2", flat_inputs, op_attrs, outputs, name)
  result, = outputs
  return result
2041 
2042 
def iterator_get_next(iterator, output_types, output_shapes, name=None):
  r"""Gets the next output from the given iterator.

  Args:
    iterator: A `Tensor` of type `resource`.
    output_types: A list of `tf.DTypes` that has length `>= 1`.
    output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
    name: A name for the operation (optional).

  Returns:
    A list of `Tensor` objects of type `output_types`.
  """
  ctx = _context._context
  if ctx is not None and ctx._eager_context.is_eager:
    # Eager mode: fast C path first, Python slow path on fallback.
    try:
      return _pywrap_tensorflow.TFE_Py_FastPathExecute(
          ctx._context_handle, ctx._eager_context.device_name,
          "IteratorGetNext", name, ctx._post_execution_callbacks, iterator,
          "output_types", output_types, "output_shapes", output_shapes)
    except _core._FallbackException:
      return iterator_get_next_eager_fallback(
          iterator, output_types=output_types, output_shapes=output_shapes,
          name=name, ctx=ctx)
    except _core._NotOkStatusException as e:
      message = e.message if name is None else e.message + " name: " + name
      _six.raise_from(_core._status_to_exception(e.code, message), None)

  # Graph mode: validate the attrs, then add the op to the default graph.
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'iterator_get_next' Op, not %r." % output_types)
  output_types = [_execute.make_type(dtype, "output_types") for dtype in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'iterator_get_next' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(shape, "output_shapes") for shape in output_shapes]
  _, _, op = _op_def_lib._apply_op_helper(
      "IteratorGetNext", iterator=iterator, output_types=output_types,
      output_shapes=output_shapes, name=name)
  outputs = op.outputs[:]
  if not outputs:
    # No outputs: hand back the Operation itself.
    return op
  flat_inputs = op.inputs
  op_attrs = ("output_types", op.get_attr("output_types"), "output_shapes",
              op.get_attr("output_shapes"))
  _execute.record_gradient(
      "IteratorGetNext", flat_inputs, op_attrs, outputs, name)
  return outputs
2097 
2098 
def iterator_get_next_eager_fallback(iterator, output_types, output_shapes, name=None, ctx=None):
  r"""This is the slowpath function for Eager mode.
  This is for function iterator_get_next
  """
  exec_ctx = ctx if ctx else _context.context()
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'iterator_get_next' Op, not %r." % output_types)
  output_types = [_execute.make_type(dtype, "output_types") for dtype in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'iterator_get_next' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(shape, "output_shapes") for shape in output_shapes]
  iterator = _ops.convert_to_tensor(iterator, _dtypes.resource)
  flat_inputs = [iterator]
  op_attrs = ("output_types", output_types, "output_shapes", output_shapes)
  # One output per requested component type.
  outputs = _execute.execute(b"IteratorGetNext", len(output_types),
                             inputs=flat_inputs, attrs=op_attrs,
                             ctx=exec_ctx, name=name)
  _execute.record_gradient(
      "IteratorGetNext", flat_inputs, op_attrs, outputs, name)
  return outputs
2123 
2124 
def iterator_get_next_as_optional(iterator, output_types, output_shapes, name=None):
  r"""Gets the next output from the given iterator as an Optional variant.

  Args:
    iterator: A `Tensor` of type `resource`.
    output_types: A list of `tf.DTypes` that has length `>= 1`.
    output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `variant`.
  """
  ctx = _context._context
  if ctx is not None and ctx._eager_context.is_eager:
    # Eager mode: fast C path first, Python slow path on fallback.
    try:
      return _pywrap_tensorflow.TFE_Py_FastPathExecute(
          ctx._context_handle, ctx._eager_context.device_name,
          "IteratorGetNextAsOptional", name, ctx._post_execution_callbacks,
          iterator, "output_types", output_types, "output_shapes",
          output_shapes)
    except _core._FallbackException:
      return iterator_get_next_as_optional_eager_fallback(
          iterator, output_types=output_types, output_shapes=output_shapes,
          name=name, ctx=ctx)
    except _core._NotOkStatusException as e:
      message = e.message if name is None else e.message + " name: " + name
      _six.raise_from(_core._status_to_exception(e.code, message), None)

  # Graph mode: validate the attrs, then add the op to the default graph.
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'iterator_get_next_as_optional' Op, not %r." % output_types)
  output_types = [_execute.make_type(dtype, "output_types") for dtype in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'iterator_get_next_as_optional' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(shape, "output_shapes") for shape in output_shapes]
  _, _, op = _op_def_lib._apply_op_helper(
      "IteratorGetNextAsOptional", iterator=iterator,
      output_types=output_types, output_shapes=output_shapes, name=name)
  outputs = op.outputs[:]
  flat_inputs = op.inputs
  op_attrs = ("output_types", op.get_attr("output_types"), "output_shapes",
              op.get_attr("output_shapes"))
  _execute.record_gradient(
      "IteratorGetNextAsOptional", flat_inputs, op_attrs, outputs, name)
  optional, = outputs
  return optional
2179 
2180 
def iterator_get_next_as_optional_eager_fallback(iterator, output_types, output_shapes, name=None, ctx=None):
  r"""This is the slowpath function for Eager mode.
  This is for function iterator_get_next_as_optional
  """
  exec_ctx = ctx if ctx else _context.context()
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'iterator_get_next_as_optional' Op, not %r." % output_types)
  output_types = [_execute.make_type(dtype, "output_types") for dtype in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'iterator_get_next_as_optional' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(shape, "output_shapes") for shape in output_shapes]
  iterator = _ops.convert_to_tensor(iterator, _dtypes.resource)
  flat_inputs = [iterator]
  op_attrs = ("output_types", output_types, "output_shapes", output_shapes)
  outputs = _execute.execute(b"IteratorGetNextAsOptional", 1,
                             inputs=flat_inputs, attrs=op_attrs,
                             ctx=exec_ctx, name=name)
  _execute.record_gradient(
      "IteratorGetNextAsOptional", flat_inputs, op_attrs, outputs, name)
  result, = outputs
  return result
2206 
2207 
def iterator_get_next_sync(iterator, output_types, output_shapes, name=None):
  r"""Gets the next output from the given iterator.

  This operation is a synchronous version of IteratorGetNext. It should only be
  used in situations where the iterator does not block the calling thread, or
  where the calling thread is not a member of the thread pool used to execute
  parallel operations (e.g. in eager mode).

  Args:
    iterator: A `Tensor` of type `resource`.
    output_types: A list of `tf.DTypes` that has length `>= 1`.
    output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
    name: A name for the operation (optional).

  Returns:
    A list of `Tensor` objects of type `output_types`.
  """
  ctx = _context._context
  if ctx is not None and ctx._eager_context.is_eager:
    # Eager mode: fast C path first, Python slow path on fallback.
    try:
      return _pywrap_tensorflow.TFE_Py_FastPathExecute(
          ctx._context_handle, ctx._eager_context.device_name,
          "IteratorGetNextSync", name, ctx._post_execution_callbacks,
          iterator, "output_types", output_types, "output_shapes",
          output_shapes)
    except _core._FallbackException:
      return iterator_get_next_sync_eager_fallback(
          iterator, output_types=output_types, output_shapes=output_shapes,
          name=name, ctx=ctx)
    except _core._NotOkStatusException as e:
      message = e.message if name is None else e.message + " name: " + name
      _six.raise_from(_core._status_to_exception(e.code, message), None)

  # Graph mode: validate the attrs, then add the op to the default graph.
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'iterator_get_next_sync' Op, not %r." % output_types)
  output_types = [_execute.make_type(dtype, "output_types") for dtype in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'iterator_get_next_sync' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(shape, "output_shapes") for shape in output_shapes]
  _, _, op = _op_def_lib._apply_op_helper(
      "IteratorGetNextSync", iterator=iterator, output_types=output_types,
      output_shapes=output_shapes, name=name)
  outputs = op.outputs[:]
  if not outputs:
    # No outputs: hand back the Operation itself.
    return op
  flat_inputs = op.inputs
  op_attrs = ("output_types", op.get_attr("output_types"), "output_shapes",
              op.get_attr("output_shapes"))
  _execute.record_gradient(
      "IteratorGetNextSync", flat_inputs, op_attrs, outputs, name)
  return outputs
2267 
2268 
def iterator_get_next_sync_eager_fallback(iterator, output_types, output_shapes, name=None, ctx=None):
  r"""This is the slowpath function for Eager mode.
  This is for function iterator_get_next_sync
  """
  exec_ctx = ctx if ctx else _context.context()
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'iterator_get_next_sync' Op, not %r." % output_types)
  output_types = [_execute.make_type(dtype, "output_types") for dtype in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'iterator_get_next_sync' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(shape, "output_shapes") for shape in output_shapes]
  iterator = _ops.convert_to_tensor(iterator, _dtypes.resource)
  flat_inputs = [iterator]
  op_attrs = ("output_types", output_types, "output_shapes", output_shapes)
  # One output per requested component type.
  outputs = _execute.execute(b"IteratorGetNextSync", len(output_types),
                             inputs=flat_inputs, attrs=op_attrs,
                             ctx=exec_ctx, name=name)
  _execute.record_gradient(
      "IteratorGetNextSync", flat_inputs, op_attrs, outputs, name)
  return outputs
2293 
2294 
def iterator_to_string_handle(resource_handle, name=None):
  r"""Converts the given `resource_handle` representing an iterator to a string.

  Args:
    resource_handle: A `Tensor` of type `resource`.
      A handle to an iterator resource.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `string`.
  """
  ctx = _context._context
  if ctx is not None and ctx._eager_context.is_eager:
    # Eager mode: fast C path first, Python slow path on fallback.
    try:
      return _pywrap_tensorflow.TFE_Py_FastPathExecute(
          ctx._context_handle, ctx._eager_context.device_name,
          "IteratorToStringHandle", name, ctx._post_execution_callbacks,
          resource_handle)
    except _core._FallbackException:
      return iterator_to_string_handle_eager_fallback(
          resource_handle, name=name, ctx=ctx)
    except _core._NotOkStatusException as e:
      message = e.message if name is None else e.message + " name: " + name
      _six.raise_from(_core._status_to_exception(e.code, message), None)

  # Graph mode: the op carries no attrs, so record_gradient gets attrs=None.
  _, _, op = _op_def_lib._apply_op_helper(
      "IteratorToStringHandle", resource_handle=resource_handle, name=name)
  outputs = op.outputs[:]
  _execute.record_gradient(
      "IteratorToStringHandle", op.inputs, None, outputs, name)
  string_handle, = outputs
  return string_handle
2334 
2335 
def iterator_to_string_handle_eager_fallback(resource_handle, name=None, ctx=None):
  r"""This is the slowpath function for Eager mode.
  This is for function iterator_to_string_handle
  """
  exec_ctx = ctx if ctx else _context.context()
  resource_handle = _ops.convert_to_tensor(resource_handle, _dtypes.resource)
  flat_inputs = [resource_handle]
  # The op carries no attrs.
  outputs = _execute.execute(b"IteratorToStringHandle", 1,
                             inputs=flat_inputs, attrs=None, ctx=exec_ctx,
                             name=name)
  _execute.record_gradient(
      "IteratorToStringHandle", flat_inputs, None, outputs, name)
  result, = outputs
  return result
2351 
2352 
def iterator_v2(shared_name, container, output_types, output_shapes, name=None):
  r"""A container for an iterator resource (V2 of the `Iterator` op).

  Args:
    shared_name: A `string`.
    container: A `string`.
    output_types: A list of `tf.DTypes` that has length `>= 1`.
    output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `resource`.
  """
  ctx = _context._context
  if ctx is not None and ctx._eager_context.is_eager:
    # Eager mode: fast C path first, Python slow path on fallback.
    try:
      return _pywrap_tensorflow.TFE_Py_FastPathExecute(
          ctx._context_handle, ctx._eager_context.device_name, "IteratorV2",
          name, ctx._post_execution_callbacks, "shared_name", shared_name,
          "container", container, "output_types", output_types,
          "output_shapes", output_shapes)
    except _core._FallbackException:
      return iterator_v2_eager_fallback(
          shared_name=shared_name, container=container,
          output_types=output_types, output_shapes=output_shapes, name=name,
          ctx=ctx)
    except _core._NotOkStatusException as e:
      message = e.message if name is None else e.message + " name: " + name
      _six.raise_from(_core._status_to_exception(e.code, message), None)

  # Graph mode: canonicalize the attr values, then add the op to the graph.
  shared_name = _execute.make_str(shared_name, "shared_name")
  container = _execute.make_str(container, "container")
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'iterator_v2' Op, not %r." % output_types)
  output_types = [_execute.make_type(dtype, "output_types") for dtype in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'iterator_v2' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(shape, "output_shapes") for shape in output_shapes]
  _, _, op = _op_def_lib._apply_op_helper(
      "IteratorV2", shared_name=shared_name, container=container,
      output_types=output_types, output_shapes=output_shapes, name=name)
  outputs = op.outputs[:]
  op_attrs = ("shared_name", op.get_attr("shared_name"), "container",
              op.get_attr("container"), "output_types",
              op.get_attr("output_types"), "output_shapes",
              op.get_attr("output_shapes"))
  _execute.record_gradient(
      "IteratorV2", op.inputs, op_attrs, outputs, name)
  handle, = outputs
  return handle
2413 
2414 
def iterator_v2_eager_fallback(shared_name, container, output_types, output_shapes, name=None, ctx=None):
  r"""This is the slowpath function for Eager mode.
  This is for function iterator_v2
  """
  exec_ctx = ctx if ctx else _context.context()
  shared_name = _execute.make_str(shared_name, "shared_name")
  container = _execute.make_str(container, "container")
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'iterator_v2' Op, not %r." % output_types)
  output_types = [_execute.make_type(dtype, "output_types") for dtype in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'iterator_v2' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(shape, "output_shapes") for shape in output_shapes]
  # The op has no tensor inputs; everything is carried in the attrs.
  flat_inputs = []
  op_attrs = ("shared_name", shared_name, "container", container,
              "output_types", output_types, "output_shapes", output_shapes)
  outputs = _execute.execute(b"IteratorV2", 1, inputs=flat_inputs,
                             attrs=op_attrs, ctx=exec_ctx, name=name)
  _execute.record_gradient(
      "IteratorV2", flat_inputs, op_attrs, outputs, name)
  result, = outputs
  return result
2441 
2442 
def latency_stats_dataset(input_dataset, tag, output_types, output_shapes, name=None):
  r"""Records the latency of producing `input_dataset` elements in a StatsAggregator.

  Args:
    input_dataset: A `Tensor` of type `variant`.
    tag: A `Tensor` of type `string`.
    output_types: A list of `tf.DTypes` that has length `>= 1`.
    output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `variant`.
  """
  ctx = _context._context
  if ctx is not None and ctx._eager_context.is_eager:
    # Eager mode: fast C path first, Python slow path on fallback.
    try:
      return _pywrap_tensorflow.TFE_Py_FastPathExecute(
          ctx._context_handle, ctx._eager_context.device_name,
          "LatencyStatsDataset", name, ctx._post_execution_callbacks,
          input_dataset, tag, "output_types", output_types, "output_shapes",
          output_shapes)
    except _core._FallbackException:
      return latency_stats_dataset_eager_fallback(
          input_dataset, tag, output_types=output_types,
          output_shapes=output_shapes, name=name, ctx=ctx)
    except _core._NotOkStatusException as e:
      message = e.message if name is None else e.message + " name: " + name
      _six.raise_from(_core._status_to_exception(e.code, message), None)

  # Graph mode: validate the attrs, then add the op to the default graph.
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'latency_stats_dataset' Op, not %r." % output_types)
  output_types = [_execute.make_type(dtype, "output_types") for dtype in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'latency_stats_dataset' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(shape, "output_shapes") for shape in output_shapes]
  _, _, op = _op_def_lib._apply_op_helper(
      "LatencyStatsDataset", input_dataset=input_dataset, tag=tag,
      output_types=output_types, output_shapes=output_shapes, name=name)
  outputs = op.outputs[:]
  flat_inputs = op.inputs
  op_attrs = ("output_types", op.get_attr("output_types"), "output_shapes",
              op.get_attr("output_shapes"))
  _execute.record_gradient(
      "LatencyStatsDataset", flat_inputs, op_attrs, outputs, name)
  dataset, = outputs
  return dataset
2498 
2499 
def latency_stats_dataset_eager_fallback(input_dataset, tag, output_types, output_shapes, name=None, ctx=None):
  r"""This is the slowpath function for Eager mode.
  This is for function latency_stats_dataset
  """
  exec_ctx = ctx if ctx else _context.context()
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'latency_stats_dataset' Op, not %r." % output_types)
  output_types = [_execute.make_type(dtype, "output_types") for dtype in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'latency_stats_dataset' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(shape, "output_shapes") for shape in output_shapes]
  input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
  tag = _ops.convert_to_tensor(tag, _dtypes.string)
  flat_inputs = [input_dataset, tag]
  op_attrs = ("output_types", output_types, "output_shapes", output_shapes)
  outputs = _execute.execute(b"LatencyStatsDataset", 1, inputs=flat_inputs,
                             attrs=op_attrs, ctx=exec_ctx, name=name)
  _execute.record_gradient(
      "LatencyStatsDataset", flat_inputs, op_attrs, outputs, name)
  result, = outputs
  return result
2525 
2526 
def make_iterator(dataset, iterator, name=None):
  r"""Makes a new iterator from the given `dataset` and stores it in `iterator`.

  This operation may be executed multiple times. Each execution will reset the
  iterator in `iterator` to the first element of `dataset`.

  Args:
    dataset: A `Tensor` of type `variant`.
    iterator: A `Tensor` of type `resource`.
    name: A name for the operation (optional).

  Returns:
    The created Operation.
  """
  _ctx = _context._context
  if _ctx is None or not _ctx._eager_context.is_eager:
    # Graph mode: MakeIterator has no outputs, so the Operation itself is
    # returned.  (Removed two unreachable statements that followed this
    # return in the generated code.)
    _, _, _op = _op_def_lib._apply_op_helper(
        "MakeIterator", dataset=dataset, iterator=iterator, name=name)
    return _op

  else:
    try:
      # Eager mode: fast C execution path.
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._eager_context.device_name, "MakeIterator",
        name, _ctx._post_execution_callbacks, dataset, iterator)
      return _result
    except _core._FallbackException:
      return make_iterator_eager_fallback(
          dataset, iterator, name=name, ctx=_ctx)
    except _core._NotOkStatusException as e:
      # Attach the op name to the error message to ease debugging.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
2564 
2565 
def make_iterator_eager_fallback(dataset, iterator, name=None, ctx=None):
  r"""Slow-path eager executor for the MakeIterator op.

  This is for function make_iterator
  """
  _ctx = ctx or _context.context()
  # MakeIterator takes exactly two inputs and produces no outputs.
  _inputs_flat = [
      _ops.convert_to_tensor(dataset, _dtypes.variant),
      _ops.convert_to_tensor(iterator, _dtypes.resource),
  ]
  _execute.execute(b"MakeIterator", 0, inputs=_inputs_flat, attrs=None,
                   ctx=_ctx, name=name)
  # No outputs: the caller gets None, matching the graph-mode contract.
  return None
2579 
2580 
def map_and_batch_dataset(input_dataset, other_arguments, batch_size, num_parallel_batches, drop_remainder, f, output_types, output_shapes, name=None):
  r"""Creates a dataset that fuses mapping with batching.

  Creates a dataset that applies `f` to the outputs of `input_dataset` and then
  batches `batch_size` of them.

  Unlike a "MapDataset", which applies `f` sequentially, this dataset invokes up
  to `batch_size * num_parallel_batches` copies of `f` in parallel.

  Args:
    input_dataset: A `Tensor` of type `variant`.
      A variant tensor representing the input dataset.
    other_arguments: A list of `Tensor` objects.
      A list of tensors, typically values that were captured when building a closure
      for `f`.
    batch_size: A `Tensor` of type `int64`.
      A scalar representing the number of elements to accumulate in a
      batch. It determines the number of concurrent invocations of `f` that process
      elements from `input_dataset` in parallel.
    num_parallel_batches: A `Tensor` of type `int64`.
      A scalar representing the number of batches to create in parallel. Processing
      multiple batches in parallel benefits workloads prone to stragglers.
    drop_remainder: A `Tensor` of type `bool`.
      A scalar representing whether the last batch should be dropped in case its size
      is smaller than desired.
    f: A function decorated with @Defun.
      A function to apply to the outputs of `input_dataset`.
    output_types: A list of `tf.DTypes` that has length `>= 1`.
    output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `variant`.
  """
  _ctx = _context._context
  # Graph mode (or no eager context yet): validate attrs and build a graph op.
  if _ctx is None or not _ctx._eager_context.is_eager:
    if not isinstance(output_types, (list, tuple)):
      raise TypeError(
          "Expected list for 'output_types' argument to "
          "'map_and_batch_dataset' Op, not %r." % output_types)
    output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
    if not isinstance(output_shapes, (list, tuple)):
      raise TypeError(
          "Expected list for 'output_shapes' argument to "
          "'map_and_batch_dataset' Op, not %r." % output_shapes)
    output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
    _, _, _op = _op_def_lib._apply_op_helper(
        "MapAndBatchDataset", input_dataset=input_dataset,
        other_arguments=other_arguments, batch_size=batch_size,
        num_parallel_batches=num_parallel_batches,
        drop_remainder=drop_remainder, f=f, output_types=output_types,
        output_shapes=output_shapes, name=name)
    _result = _op.outputs[:]
    _inputs_flat = _op.inputs
    # Re-read the finalized attrs from the op so gradients see exact values.
    _attrs = ("f", _op.get_attr("f"), "Targuments",
              _op.get_attr("Targuments"), "output_types",
              _op.get_attr("output_types"), "output_shapes",
              _op.get_attr("output_shapes"))
    _execute.record_gradient(
      "MapAndBatchDataset", _inputs_flat, _attrs, _result, name)
    _result, = _result
    return _result

  else:
    # Eager mode: try the C fast path first; argument order is part of the
    # TFE_Py_FastPathExecute protocol and must not be changed.
    try:
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._eager_context.device_name,
        "MapAndBatchDataset", name, _ctx._post_execution_callbacks,
        input_dataset, other_arguments, batch_size, num_parallel_batches,
        drop_remainder, "f", f, "output_types", output_types, "output_shapes",
        output_shapes)
      return _result
    except _core._FallbackException:
      # Fast path rejected the inputs; use the Python slow path instead.
      return map_and_batch_dataset_eager_fallback(
          input_dataset, other_arguments, batch_size, num_parallel_batches,
          drop_remainder, f=f, output_types=output_types,
          output_shapes=output_shapes, name=name, ctx=_ctx)
    except _core._NotOkStatusException as e:
      # Re-raise the C++ status as the corresponding Python exception type.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
2664 
2665 
def map_and_batch_dataset_eager_fallback(input_dataset, other_arguments, batch_size, num_parallel_batches, drop_remainder, f, output_types, output_shapes, name=None, ctx=None):
  r"""Slow-path eager executor for the MapAndBatchDataset op.

  This is for function map_and_batch_dataset
  """
  _ctx = ctx or _context.context()
  # Validate list-typed attrs up front, then canonicalize their elements.
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'map_and_batch_dataset' Op, not %r." % output_types)
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'map_and_batch_dataset' Op, not %r." % output_shapes)
  output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
  output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  # Captured closure arguments keep their own (mixed) dtypes.
  _attr_Targuments, other_arguments = _execute.convert_to_mixed_eager_tensors(other_arguments, _ctx)
  input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
  scalar_inputs = [
      _ops.convert_to_tensor(batch_size, _dtypes.int64),
      _ops.convert_to_tensor(num_parallel_batches, _dtypes.int64),
      _ops.convert_to_tensor(drop_remainder, _dtypes.bool),
  ]
  _inputs_flat = [input_dataset] + list(other_arguments) + scalar_inputs
  _attrs = ("f", f, "Targuments", _attr_Targuments, "output_types",
            output_types, "output_shapes", output_shapes)
  _result = _execute.execute(b"MapAndBatchDataset", 1, inputs=_inputs_flat,
                             attrs=_attrs, ctx=_ctx, name=name)
  _execute.record_gradient(
      "MapAndBatchDataset", _inputs_flat, _attrs, _result, name)
  return _result[0]
2695 
2696 
def map_and_batch_dataset_v2(input_dataset, other_arguments, batch_size, num_parallel_calls, drop_remainder, f, output_types, output_shapes, name=None):
  r"""Creates a dataset that fuses mapping with batching.

  Creates a dataset that applies `f` to the outputs of `input_dataset` and then
  batches `batch_size` of them.

  Unlike a "MapDataset", which applies `f` sequentially, this dataset invokes
  up to `num_parallel_calls` copies of `f` in parallel.

  Args:
    input_dataset: A `Tensor` of type `variant`.
      A variant tensor representing the input dataset.
    other_arguments: A list of `Tensor` objects.
      A list of tensors, typically values that were captured when building a closure
      for `f`.
    batch_size: A `Tensor` of type `int64`.
      A scalar representing the number of elements to accumulate in a
      batch. It determines the number of concurrent invocations of `f` that process
      elements from `input_dataset` in parallel.
    num_parallel_calls: A `Tensor` of type `int64`.
      A scalar representing the maximum number of parallel invocations of the `map_fn`
      function. Applying the `map_fn` on consecutive input elements in parallel has
      the potential to improve input pipeline throughput.
    drop_remainder: A `Tensor` of type `bool`.
      A scalar representing whether the last batch should be dropped in case its size
      is smaller than desired.
    f: A function decorated with @Defun.
      A function to apply to the outputs of `input_dataset`.
    output_types: A list of `tf.DTypes` that has length `>= 1`.
    output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `variant`.
  """
  _ctx = _context._context
  # Graph mode (or no eager context yet): validate attrs and build a graph op.
  if _ctx is None or not _ctx._eager_context.is_eager:
    if not isinstance(output_types, (list, tuple)):
      raise TypeError(
          "Expected list for 'output_types' argument to "
          "'map_and_batch_dataset_v2' Op, not %r." % output_types)
    output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
    if not isinstance(output_shapes, (list, tuple)):
      raise TypeError(
          "Expected list for 'output_shapes' argument to "
          "'map_and_batch_dataset_v2' Op, not %r." % output_shapes)
    output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
    _, _, _op = _op_def_lib._apply_op_helper(
        "MapAndBatchDatasetV2", input_dataset=input_dataset,
        other_arguments=other_arguments, batch_size=batch_size,
        num_parallel_calls=num_parallel_calls, drop_remainder=drop_remainder,
        f=f, output_types=output_types, output_shapes=output_shapes,
        name=name)
    _result = _op.outputs[:]
    _inputs_flat = _op.inputs
    # Re-read the finalized attrs from the op so gradients see exact values.
    _attrs = ("f", _op.get_attr("f"), "Targuments",
              _op.get_attr("Targuments"), "output_types",
              _op.get_attr("output_types"), "output_shapes",
              _op.get_attr("output_shapes"))
    _execute.record_gradient(
      "MapAndBatchDatasetV2", _inputs_flat, _attrs, _result, name)
    _result, = _result
    return _result

  else:
    # Eager mode: try the C fast path first; argument order is part of the
    # TFE_Py_FastPathExecute protocol and must not be changed.
    try:
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._eager_context.device_name,
        "MapAndBatchDatasetV2", name, _ctx._post_execution_callbacks,
        input_dataset, other_arguments, batch_size, num_parallel_calls,
        drop_remainder, "f", f, "output_types", output_types, "output_shapes",
        output_shapes)
      return _result
    except _core._FallbackException:
      # Fast path rejected the inputs; use the Python slow path instead.
      return map_and_batch_dataset_v2_eager_fallback(
          input_dataset, other_arguments, batch_size, num_parallel_calls,
          drop_remainder, f=f, output_types=output_types,
          output_shapes=output_shapes, name=name, ctx=_ctx)
    except _core._NotOkStatusException as e:
      # Re-raise the C++ status as the corresponding Python exception type.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
2781 
2782 
def map_and_batch_dataset_v2_eager_fallback(input_dataset, other_arguments, batch_size, num_parallel_calls, drop_remainder, f, output_types, output_shapes, name=None, ctx=None):
  r"""Slow-path eager executor for the MapAndBatchDatasetV2 op.

  This is for function map_and_batch_dataset_v2
  """
  _ctx = ctx or _context.context()
  # Validate list-typed attrs up front, then canonicalize their elements.
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'map_and_batch_dataset_v2' Op, not %r." % output_types)
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'map_and_batch_dataset_v2' Op, not %r." % output_shapes)
  output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
  output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  # Captured closure arguments keep their own (mixed) dtypes.
  _attr_Targuments, other_arguments = _execute.convert_to_mixed_eager_tensors(other_arguments, _ctx)
  input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
  scalar_inputs = [
      _ops.convert_to_tensor(batch_size, _dtypes.int64),
      _ops.convert_to_tensor(num_parallel_calls, _dtypes.int64),
      _ops.convert_to_tensor(drop_remainder, _dtypes.bool),
  ]
  _inputs_flat = [input_dataset] + list(other_arguments) + scalar_inputs
  _attrs = ("f", f, "Targuments", _attr_Targuments, "output_types",
            output_types, "output_shapes", output_shapes)
  _result = _execute.execute(b"MapAndBatchDatasetV2", 1, inputs=_inputs_flat,
                             attrs=_attrs, ctx=_ctx, name=name)
  _execute.record_gradient(
      "MapAndBatchDatasetV2", _inputs_flat, _attrs, _result, name)
  return _result[0]
2812 
2813 
def map_dataset(input_dataset, other_arguments, f, output_types, output_shapes, use_inter_op_parallelism=True, name=None):
  r"""Creates a dataset that applies `f` to the outputs of `input_dataset`.

  Args:
    input_dataset: A `Tensor` of type `variant`.
    other_arguments: A list of `Tensor` objects.
    f: A function decorated with @Defun.
    output_types: A list of `tf.DTypes` that has length `>= 1`.
    output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
    use_inter_op_parallelism: An optional `bool`. Defaults to `True`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `variant`.
  """
  _ctx = _context._context
  # Graph mode (or no eager context yet): validate attrs and build a graph op.
  if _ctx is None or not _ctx._eager_context.is_eager:
    if not isinstance(output_types, (list, tuple)):
      raise TypeError(
          "Expected list for 'output_types' argument to "
          "'map_dataset' Op, not %r." % output_types)
    output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
    if not isinstance(output_shapes, (list, tuple)):
      raise TypeError(
          "Expected list for 'output_shapes' argument to "
          "'map_dataset' Op, not %r." % output_shapes)
    output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
    # An explicit None falls back to the op's declared default (True).
    if use_inter_op_parallelism is None:
      use_inter_op_parallelism = True
    use_inter_op_parallelism = _execute.make_bool(use_inter_op_parallelism, "use_inter_op_parallelism")
    _, _, _op = _op_def_lib._apply_op_helper(
        "MapDataset", input_dataset=input_dataset,
        other_arguments=other_arguments, f=f, output_types=output_types,
        output_shapes=output_shapes,
        use_inter_op_parallelism=use_inter_op_parallelism, name=name)
    _result = _op.outputs[:]
    _inputs_flat = _op.inputs
    # Re-read the finalized attrs from the op so gradients see exact values.
    _attrs = ("f", _op.get_attr("f"), "Targuments",
              _op.get_attr("Targuments"), "output_types",
              _op.get_attr("output_types"), "output_shapes",
              _op.get_attr("output_shapes"), "use_inter_op_parallelism",
              _op.get_attr("use_inter_op_parallelism"))
    _execute.record_gradient(
      "MapDataset", _inputs_flat, _attrs, _result, name)
    _result, = _result
    return _result

  else:
    # Eager mode: try the C fast path first; argument order is part of the
    # TFE_Py_FastPathExecute protocol and must not be changed.
    try:
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._eager_context.device_name, "MapDataset",
        name, _ctx._post_execution_callbacks, input_dataset, other_arguments,
        "f", f, "output_types", output_types, "output_shapes", output_shapes,
        "use_inter_op_parallelism", use_inter_op_parallelism)
      return _result
    except _core._FallbackException:
      # Fast path rejected the inputs; use the Python slow path instead.
      return map_dataset_eager_fallback(
          input_dataset, other_arguments, f=f, output_types=output_types,
          output_shapes=output_shapes,
          use_inter_op_parallelism=use_inter_op_parallelism, name=name,
          ctx=_ctx)
    except _core._NotOkStatusException as e:
      # Re-raise the C++ status as the corresponding Python exception type.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
2881 
2882 
def map_dataset_eager_fallback(input_dataset, other_arguments, f, output_types, output_shapes, use_inter_op_parallelism=True, name=None, ctx=None):
  r"""Slow-path eager executor for the MapDataset op.

  This is for function map_dataset
  """
  _ctx = ctx or _context.context()
  # Validate list-typed attrs up front, then canonicalize their elements.
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'map_dataset' Op, not %r." % output_types)
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'map_dataset' Op, not %r." % output_shapes)
  output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
  output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  # An explicit None falls back to the op's declared default (True).
  use_inter_op_parallelism = _execute.make_bool(
      True if use_inter_op_parallelism is None else use_inter_op_parallelism,
      "use_inter_op_parallelism")
  # Captured closure arguments keep their own (mixed) dtypes.
  _attr_Targuments, other_arguments = _execute.convert_to_mixed_eager_tensors(other_arguments, _ctx)
  input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
  _inputs_flat = [input_dataset] + list(other_arguments)
  _attrs = ("f", f, "Targuments", _attr_Targuments, "output_types",
            output_types, "output_shapes", output_shapes,
            "use_inter_op_parallelism", use_inter_op_parallelism)
  _result = _execute.execute(b"MapDataset", 1, inputs=_inputs_flat,
                             attrs=_attrs, ctx=_ctx, name=name)
  _execute.record_gradient(
      "MapDataset", _inputs_flat, _attrs, _result, name)
  return _result[0]
2913 
2914 
def map_defun(arguments, output_types, output_shapes, f, name=None):
  r"""  Maps a function on the list of tensors unpacked from inputs on dimension 0.

  Args:
    arguments: A list of `Tensor` objects.
          A list of tensors whose types are Targuments, corresponding to the inputs the
          function should be mapped over.
    output_types: A list of `tf.DTypes` that has length `>= 1`.
      A list of types.
    output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
      A list of shapes.
    f: A function decorated with @Defun.
    name: A name for the operation (optional).

  Returns:
    A list of `Tensor` objects of type `output_types`.
  """
  _ctx = _context._context
  # Graph mode (or no eager context yet): validate attrs and build a graph op.
  if _ctx is None or not _ctx._eager_context.is_eager:
    if not isinstance(output_types, (list, tuple)):
      raise TypeError(
          "Expected list for 'output_types' argument to "
          "'map_defun' Op, not %r." % output_types)
    output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
    if not isinstance(output_shapes, (list, tuple)):
      raise TypeError(
          "Expected list for 'output_shapes' argument to "
          "'map_defun' Op, not %r." % output_shapes)
    output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
    _, _, _op = _op_def_lib._apply_op_helper(
        "MapDefun", arguments=arguments, output_types=output_types,
        output_shapes=output_shapes, f=f, name=name)
    # MapDefun has multiple outputs, so the full list is returned (no unpack).
    _result = _op.outputs[:]
    _inputs_flat = _op.inputs
    _attrs = ("Targuments", _op.get_attr("Targuments"), "output_types",
              _op.get_attr("output_types"), "output_shapes",
              _op.get_attr("output_shapes"), "f", _op.get_attr("f"))
    _execute.record_gradient(
      "MapDefun", _inputs_flat, _attrs, _result, name)
    return _result

  else:
    # Eager mode: try the C fast path first; argument order is part of the
    # TFE_Py_FastPathExecute protocol and must not be changed.
    try:
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._eager_context.device_name, "MapDefun",
        name, _ctx._post_execution_callbacks, arguments, "output_types",
        output_types, "output_shapes", output_shapes, "f", f)
      return _result
    except _core._FallbackException:
      # Fast path rejected the inputs; use the Python slow path instead.
      return map_defun_eager_fallback(
          arguments, output_types=output_types, output_shapes=output_shapes,
          f=f, name=name, ctx=_ctx)
    except _core._NotOkStatusException as e:
      # Re-raise the C++ status as the corresponding Python exception type.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
2973 
2974 
def map_defun_eager_fallback(arguments, output_types, output_shapes, f, name=None, ctx=None):
  r"""Slow-path eager executor for the MapDefun op.

  This is for function map_defun
  """
  _ctx = ctx or _context.context()
  # Validate list-typed attrs up front, then canonicalize their elements.
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'map_defun' Op, not %r." % output_types)
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'map_defun' Op, not %r." % output_shapes)
  output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
  output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  # The mapped-over inputs keep their own (mixed) dtypes.
  _attr_Targuments, arguments = _execute.convert_to_mixed_eager_tensors(arguments, _ctx)
  _inputs_flat = list(arguments)
  _attrs = ("Targuments", _attr_Targuments, "output_types", output_types,
            "output_shapes", output_shapes, "f", f)
  # One output per declared output type; return the whole list.
  _result = _execute.execute(b"MapDefun", len(output_types),
                             inputs=_inputs_flat, attrs=_attrs, ctx=_ctx,
                             name=name)
  _execute.record_gradient(
      "MapDefun", _inputs_flat, _attrs, _result, name)
  return _result
3000 
3001 
def model_dataset(input_dataset, output_types, output_shapes, name=None):
  r"""Identity transformation that models performance.

  Identity transformation that models performance.

  Args:
    input_dataset: A `Tensor` of type `variant`.
      A variant tensor representing the input dataset.
    output_types: A list of `tf.DTypes` that has length `>= 1`.
    output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `variant`.
  """
  _ctx = _context._context
  # Graph mode (or no eager context yet): validate attrs and build a graph op.
  if _ctx is None or not _ctx._eager_context.is_eager:
    if not isinstance(output_types, (list, tuple)):
      raise TypeError(
          "Expected list for 'output_types' argument to "
          "'model_dataset' Op, not %r." % output_types)
    output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
    if not isinstance(output_shapes, (list, tuple)):
      raise TypeError(
          "Expected list for 'output_shapes' argument to "
          "'model_dataset' Op, not %r." % output_shapes)
    output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
    _, _, _op = _op_def_lib._apply_op_helper(
        "ModelDataset", input_dataset=input_dataset,
        output_types=output_types, output_shapes=output_shapes, name=name)
    _result = _op.outputs[:]
    _inputs_flat = _op.inputs
    # Re-read the finalized attrs from the op so gradients see exact values.
    _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes",
              _op.get_attr("output_shapes"))
    _execute.record_gradient(
      "ModelDataset", _inputs_flat, _attrs, _result, name)
    _result, = _result
    return _result

  else:
    # Eager mode: try the C fast path first; argument order is part of the
    # TFE_Py_FastPathExecute protocol and must not be changed.
    try:
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._eager_context.device_name, "ModelDataset",
        name, _ctx._post_execution_callbacks, input_dataset, "output_types",
        output_types, "output_shapes", output_shapes)
      return _result
    except _core._FallbackException:
      # Fast path rejected the inputs; use the Python slow path instead.
      return model_dataset_eager_fallback(
          input_dataset, output_types=output_types,
          output_shapes=output_shapes, name=name, ctx=_ctx)
    except _core._NotOkStatusException as e:
      # Re-raise the C++ status as the corresponding Python exception type.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
3058 
3059 
def model_dataset_eager_fallback(input_dataset, output_types, output_shapes, name=None, ctx=None):
  r"""Slow-path eager executor for the ModelDataset op.

  This is for function model_dataset
  """
  _ctx = ctx or _context.context()
  # Validate list-typed attrs up front, then canonicalize their elements.
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'model_dataset' Op, not %r." % output_types)
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'model_dataset' Op, not %r." % output_shapes)
  output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
  output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  _inputs_flat = [_ops.convert_to_tensor(input_dataset, _dtypes.variant)]
  _attrs = ("output_types", output_types, "output_shapes", output_shapes)
  _result = _execute.execute(b"ModelDataset", 1, inputs=_inputs_flat,
                             attrs=_attrs, ctx=_ctx, name=name)
  _execute.record_gradient(
      "ModelDataset", _inputs_flat, _attrs, _result, name)
  return _result[0]
3084 
3085 
def multi_device_iterator(devices, shared_name, container, output_types, output_shapes, name=None):
  r"""Creates a MultiDeviceIterator resource.

  Args:
    devices: A list of `strings` that has length `>= 1`.
      A list of devices the iterator works across.
    shared_name: A `string`.
      If non-empty, this resource will be shared under the given name
      across multiple sessions.
    container: A `string`.
      If non-empty, this resource is placed in the given container.
      Otherwise, a default container is used.
    output_types: A list of `tf.DTypes` that has length `>= 1`.
      The type list for the return values.
    output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
      The list of shapes being produced.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `resource`.
  """
  _ctx = _context._context
  # Graph mode (or no eager context yet): validate attrs and build a graph op.
  if _ctx is None or not _ctx._eager_context.is_eager:
    if not isinstance(devices, (list, tuple)):
      raise TypeError(
          "Expected list for 'devices' argument to "
          "'multi_device_iterator' Op, not %r." % devices)
    devices = [_execute.make_str(_s, "devices") for _s in devices]
    shared_name = _execute.make_str(shared_name, "shared_name")
    container = _execute.make_str(container, "container")
    if not isinstance(output_types, (list, tuple)):
      raise TypeError(
          "Expected list for 'output_types' argument to "
          "'multi_device_iterator' Op, not %r." % output_types)
    output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
    if not isinstance(output_shapes, (list, tuple)):
      raise TypeError(
          "Expected list for 'output_shapes' argument to "
          "'multi_device_iterator' Op, not %r." % output_shapes)
    output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
    _, _, _op = _op_def_lib._apply_op_helper(
        "MultiDeviceIterator", devices=devices, shared_name=shared_name,
        container=container, output_types=output_types,
        output_shapes=output_shapes, name=name)
    _result = _op.outputs[:]
    _inputs_flat = _op.inputs
    # Re-read the finalized attrs from the op so gradients see exact values.
    _attrs = ("devices", _op.get_attr("devices"), "shared_name",
              _op.get_attr("shared_name"), "container",
              _op.get_attr("container"), "output_types",
              _op.get_attr("output_types"), "output_shapes",
              _op.get_attr("output_shapes"))
    _execute.record_gradient(
      "MultiDeviceIterator", _inputs_flat, _attrs, _result, name)
    _result, = _result
    return _result

  else:
    # Eager mode: try the C fast path first; this op has no tensor inputs,
    # only attrs, so the fast-path call passes attr name/value pairs only.
    try:
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._eager_context.device_name,
        "MultiDeviceIterator", name, _ctx._post_execution_callbacks,
        "devices", devices, "shared_name", shared_name, "container",
        container, "output_types", output_types, "output_shapes",
        output_shapes)
      return _result
    except _core._FallbackException:
      # Fast path rejected the inputs; use the Python slow path instead.
      return multi_device_iterator_eager_fallback(
          devices=devices, shared_name=shared_name, container=container,
          output_types=output_types, output_shapes=output_shapes, name=name,
          ctx=_ctx)
    except _core._NotOkStatusException as e:
      # Re-raise the C++ status as the corresponding Python exception type.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
3162 
3163 
def multi_device_iterator_eager_fallback(devices, shared_name, container, output_types, output_shapes, name=None, ctx=None):
  r"""Slow-path eager executor for the MultiDeviceIterator op.

  Validates and canonicalizes the attr values, then executes the op via
  `_execute.execute` instead of the C fast path.
  """
  _ctx = ctx or _context.context()

  def _require_list(value, arg_name):
    # Every list-valued attr must arrive as a list or tuple.
    if not isinstance(value, (list, tuple)):
      raise TypeError(
          "Expected list for '%s' argument to "
          "'multi_device_iterator' Op, not %r." % (arg_name, value))

  _require_list(devices, "devices")
  devices = [_execute.make_str(_d, "devices") for _d in devices]
  shared_name = _execute.make_str(shared_name, "shared_name")
  container = _execute.make_str(container, "container")
  _require_list(output_types, "output_types")
  output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
  _require_list(output_shapes, "output_shapes")
  output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  # The op takes no tensor inputs; everything is carried in attrs.
  _inputs_flat = []
  _attrs = ("devices", devices, "shared_name", shared_name, "container",
            container, "output_types", output_types, "output_shapes",
            output_shapes)
  _result = _execute.execute(b"MultiDeviceIterator", 1, inputs=_inputs_flat,
                             attrs=_attrs, ctx=_ctx, name=name)
  _execute.record_gradient(
      "MultiDeviceIterator", _inputs_flat, _attrs, _result, name)
  handle, = _result
  return handle
3195 
3196 
def multi_device_iterator_from_string_handle(string_handle, output_types=None, output_shapes=None, name=None):
  r"""Generates a MultiDeviceIterator resource from its provided string handle.

  Args:
    string_handle: A `Tensor` of type `string`.
      String representing the resource.
    output_types: An optional list of `tf.DTypes`. Defaults to `[]`.
      The type list for the return values.
    output_shapes: An optional list of shapes (each a `tf.TensorShape` or list of `ints`). Defaults to `[]`.
      The list of shapes being produced.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `resource`.

  Raises:
    TypeError: if `output_types` or `output_shapes` is neither a list nor a
      tuple (graph mode / slow path only).
  """
  # Use `None` sentinels instead of mutable `[]` defaults, and normalize
  # before branching so the eager fast path also sees `[]` rather than None.
  if output_types is None:
    output_types = []
  if output_shapes is None:
    output_shapes = []
  _ctx = _context._context
  if _ctx is None or not _ctx._eager_context.is_eager:
    # Graph mode: validate attrs and build an op node in the current graph.
    if not isinstance(output_types, (list, tuple)):
      raise TypeError(
          "Expected list for 'output_types' argument to "
          "'multi_device_iterator_from_string_handle' Op, not %r." % output_types)
    output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
    if not isinstance(output_shapes, (list, tuple)):
      raise TypeError(
          "Expected list for 'output_shapes' argument to "
          "'multi_device_iterator_from_string_handle' Op, not %r." % output_shapes)
    output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
    _, _, _op = _op_def_lib._apply_op_helper(
        "MultiDeviceIteratorFromStringHandle", string_handle=string_handle,
        output_types=output_types, output_shapes=output_shapes, name=name)
    _result = _op.outputs[:]
    _inputs_flat = _op.inputs
    _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes",
              _op.get_attr("output_shapes"))
    _execute.record_gradient(
      "MultiDeviceIteratorFromStringHandle", _inputs_flat, _attrs, _result, name)
    _result, = _result
    return _result

  else:
    # Eager mode: try the C fast path; fall back to the Python slow path if
    # the inputs cannot be handled there.
    try:
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._eager_context.device_name,
        "MultiDeviceIteratorFromStringHandle", name,
        _ctx._post_execution_callbacks, string_handle, "output_types",
        output_types, "output_shapes", output_shapes)
      return _result
    except _core._FallbackException:
      return multi_device_iterator_from_string_handle_eager_fallback(
          string_handle, output_types=output_types,
          output_shapes=output_shapes, name=name, ctx=_ctx)
    except _core._NotOkStatusException as e:
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
3258 
3259 
def multi_device_iterator_from_string_handle_eager_fallback(string_handle, output_types=None, output_shapes=None, name=None, ctx=None):
  r"""This is the slowpath function for Eager mode.
  This is for function multi_device_iterator_from_string_handle

  Uses `None` sentinels instead of mutable `[]` defaults; `None` is
  normalized to `[]` below, so behavior for callers is unchanged.
  """
  _ctx = ctx if ctx else _context.context()
  if output_types is None:
    output_types = []
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'multi_device_iterator_from_string_handle' Op, not %r." % output_types)
  output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
  if output_shapes is None:
    output_shapes = []
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'multi_device_iterator_from_string_handle' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  string_handle = _ops.convert_to_tensor(string_handle, _dtypes.string)
  _inputs_flat = [string_handle]
  _attrs = ("output_types", output_types, "output_shapes", output_shapes)
  _result = _execute.execute(b"MultiDeviceIteratorFromStringHandle", 1,
                             inputs=_inputs_flat, attrs=_attrs, ctx=_ctx,
                             name=name)
  _execute.record_gradient(
      "MultiDeviceIteratorFromStringHandle", _inputs_flat, _attrs, _result, name)
  _result, = _result
  return _result
3289 
3290 
def multi_device_iterator_get_next_from_shard(multi_device_iterator, shard_num, incarnation_id, output_types, output_shapes, name=None):
  r"""Gets next element for the provided shard number.

  Args:
    multi_device_iterator: A `Tensor` of type `resource`.
      A MultiDeviceIterator resource.
    shard_num: A `Tensor` of type `int32`.
      Integer representing which shard to fetch data for.
    incarnation_id: A `Tensor` of type `int64`.
      Which incarnation of the MultiDeviceIterator is running.
    output_types: A list of `tf.DTypes` that has length `>= 1`.
      The type list for the return values.
    output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
      The list of shapes being produced.
    name: A name for the operation (optional).

  Returns:
    A list of `Tensor` objects of type `output_types`.
  """
  _ctx = _context._context
  if _ctx is not None and _ctx._eager_context.is_eager:
    # Eager mode: attempt the C fast path, falling back to the Python slow
    # path when the fast path rejects the inputs.
    try:
      return _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._eager_context.device_name,
        "MultiDeviceIteratorGetNextFromShard", name,
        _ctx._post_execution_callbacks, multi_device_iterator, shard_num,
        incarnation_id, "output_types", output_types, "output_shapes",
        output_shapes)
    except _core._FallbackException:
      return multi_device_iterator_get_next_from_shard_eager_fallback(
          multi_device_iterator, shard_num, incarnation_id,
          output_types=output_types, output_shapes=output_shapes, name=name,
          ctx=_ctx)
    except _core._NotOkStatusException as e:
      message = e.message if name is None else e.message + " name: " + name
      _six.raise_from(_core._status_to_exception(e.code, message), None)
  # Graph mode: validate attrs and add an op node to the current graph.
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'multi_device_iterator_get_next_from_shard' Op, not %r." % output_types)
  output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'multi_device_iterator_get_next_from_shard' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  _, _, _graph_op = _op_def_lib._apply_op_helper(
      "MultiDeviceIteratorGetNextFromShard",
      multi_device_iterator=multi_device_iterator, shard_num=shard_num,
      incarnation_id=incarnation_id, output_types=output_types,
      output_shapes=output_shapes, name=name)
  _outputs = _graph_op.outputs[:]
  if not _outputs:
    # No outputs were produced; return the op itself, as callers expect.
    return _graph_op
  _attrs = ("output_types", _graph_op.get_attr("output_types"),
            "output_shapes", _graph_op.get_attr("output_shapes"))
  _execute.record_gradient(
      "MultiDeviceIteratorGetNextFromShard", _graph_op.inputs, _attrs,
      _outputs, name)
  return _outputs
3357 
3358 
def multi_device_iterator_get_next_from_shard_eager_fallback(multi_device_iterator, shard_num, incarnation_id, output_types, output_shapes, name=None, ctx=None):
  r"""Slow-path eager executor for MultiDeviceIteratorGetNextFromShard.

  Validates the attrs, converts the inputs to tensors of the expected
  dtypes, and runs the op via `_execute.execute`.
  """
  _ctx = ctx or _context.context()

  def _require_list(value, arg_name):
    # List-valued attrs must be lists or tuples.
    if not isinstance(value, (list, tuple)):
      raise TypeError(
          "Expected list for '%s' argument to "
          "'multi_device_iterator_get_next_from_shard' Op, not %r." % (
              arg_name, value))

  _require_list(output_types, "output_types")
  output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
  _require_list(output_shapes, "output_shapes")
  output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  _inputs_flat = [
      _ops.convert_to_tensor(multi_device_iterator, _dtypes.resource),
      _ops.convert_to_tensor(shard_num, _dtypes.int32),
      _ops.convert_to_tensor(incarnation_id, _dtypes.int64),
  ]
  _attrs = ("output_types", output_types, "output_shapes", output_shapes)
  # One output tensor per requested dtype.
  _result = _execute.execute(b"MultiDeviceIteratorGetNextFromShard",
                             len(output_types), inputs=_inputs_flat,
                             attrs=_attrs, ctx=_ctx, name=name)
  _execute.record_gradient(
      "MultiDeviceIteratorGetNextFromShard", _inputs_flat, _attrs, _result,
      name)
  return _result
3385 
3386 
def multi_device_iterator_init(dataset, multi_device_iterator, max_buffer_size, name=None):
  r"""Initializes the multi device iterator with the given dataset.

  Args:
    dataset: A `Tensor` of type `variant`. Dataset to be iterated upon.
    multi_device_iterator: A `Tensor` of type `resource`.
      A MultiDeviceIteratorResource.
    max_buffer_size: A `Tensor` of type `int64`.
      The maximum size of the host side per device buffer to keep.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `int64`.
  """
  _ctx = _context._context
  if _ctx is not None and _ctx._eager_context.is_eager:
    # Eager mode: C fast path first, then the Python slow path on fallback.
    try:
      return _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._eager_context.device_name,
        "MultiDeviceIteratorInit", name, _ctx._post_execution_callbacks,
        dataset, multi_device_iterator, max_buffer_size)
    except _core._FallbackException:
      return multi_device_iterator_init_eager_fallback(
          dataset, multi_device_iterator, max_buffer_size, name=name,
          ctx=_ctx)
    except _core._NotOkStatusException as e:
      message = e.message if name is None else e.message + " name: " + name
      _six.raise_from(_core._status_to_exception(e.code, message), None)
  # Graph mode: build the op node; this op carries no attrs.
  _, _, _graph_op = _op_def_lib._apply_op_helper(
      "MultiDeviceIteratorInit", dataset=dataset,
      multi_device_iterator=multi_device_iterator,
      max_buffer_size=max_buffer_size, name=name)
  _outputs = _graph_op.outputs[:]
  _execute.record_gradient(
      "MultiDeviceIteratorInit", _graph_op.inputs, None, _outputs, name)
  incarnation_id, = _outputs
  return incarnation_id
3432 
3433 
def multi_device_iterator_init_eager_fallback(dataset, multi_device_iterator, max_buffer_size, name=None, ctx=None):
  r"""Slow-path eager executor for MultiDeviceIteratorInit.

  Converts the three inputs to tensors of their declared dtypes and runs
  the op via `_execute.execute`.
  """
  _ctx = ctx or _context.context()
  _inputs_flat = [
      _ops.convert_to_tensor(dataset, _dtypes.variant),
      _ops.convert_to_tensor(multi_device_iterator, _dtypes.resource),
      _ops.convert_to_tensor(max_buffer_size, _dtypes.int64),
  ]
  # The op has no attrs and a single int64 output.
  _result = _execute.execute(b"MultiDeviceIteratorInit", 1,
                             inputs=_inputs_flat, attrs=None, ctx=_ctx,
                             name=name)
  _execute.record_gradient(
      "MultiDeviceIteratorInit", _inputs_flat, None, _result, name)
  incarnation_id, = _result
  return incarnation_id
3451 
3452 
def multi_device_iterator_to_string_handle(multi_device_iterator, name=None):
  r"""Produces a string handle for the given MultiDeviceIterator.

  Args:
    multi_device_iterator: A `Tensor` of type `resource`.
      A MultiDeviceIterator resource.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `string`.
  """
  _ctx = _context._context
  if _ctx is not None and _ctx._eager_context.is_eager:
    # Eager mode: C fast path first, then the Python slow path on fallback.
    try:
      return _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._eager_context.device_name,
        "MultiDeviceIteratorToStringHandle", name,
        _ctx._post_execution_callbacks, multi_device_iterator)
    except _core._FallbackException:
      return multi_device_iterator_to_string_handle_eager_fallback(
          multi_device_iterator, name=name, ctx=_ctx)
    except _core._NotOkStatusException as e:
      message = e.message if name is None else e.message + " name: " + name
      _six.raise_from(_core._status_to_exception(e.code, message), None)
  # Graph mode: build the op node; this op carries no attrs.
  _, _, _graph_op = _op_def_lib._apply_op_helper(
      "MultiDeviceIteratorToStringHandle",
      multi_device_iterator=multi_device_iterator, name=name)
  _outputs = _graph_op.outputs[:]
  _execute.record_gradient(
      "MultiDeviceIteratorToStringHandle", _graph_op.inputs, None, _outputs,
      name)
  string_handle, = _outputs
  return string_handle
3493 
3494 
def multi_device_iterator_to_string_handle_eager_fallback(multi_device_iterator, name=None, ctx=None):
  r"""Slow-path eager executor for MultiDeviceIteratorToStringHandle.

  Converts the resource input to a tensor and runs the op via
  `_execute.execute`.
  """
  _ctx = ctx or _context.context()
  _inputs_flat = [
      _ops.convert_to_tensor(multi_device_iterator, _dtypes.resource),
  ]
  # No attrs; a single string output.
  _result = _execute.execute(b"MultiDeviceIteratorToStringHandle", 1,
                             inputs=_inputs_flat, attrs=None, ctx=_ctx,
                             name=name)
  _execute.record_gradient(
      "MultiDeviceIteratorToStringHandle", _inputs_flat, None, _result, name)
  string_handle, = _result
  return string_handle
3510 
3511 
def one_shot_iterator(dataset_factory, output_types, output_shapes, container="", shared_name="", name=None):
  r"""Makes a "one-shot" iterator that can be iterated only once.

  A one-shot iterator bundles the logic for defining the dataset and
  the state of the iterator in a single op, which allows simple input
  pipelines to be defined without an additional initialization
  ("MakeIterator") step.

  One-shot iterators have the following limitations:

  * They do not support parameterization: all logic for creating the underlying
    dataset must be bundled in the `dataset_factory` function.
  * They are not resettable. Once a one-shot iterator reaches the end of its
    underlying dataset, subsequent "IteratorGetNext" operations on that
    iterator will always produce an `OutOfRange` error.

  For greater flexibility, use "Iterator" and "MakeIterator" to define
  an iterator using an arbitrary subgraph, which may capture tensors
  (including fed values) as parameters, and which may be reset multiple
  times by rerunning "MakeIterator".

  Args:
    dataset_factory: A function decorated with @Defun.
      A function of type `() -> DT_VARIANT`, where the returned
      DT_VARIANT is a dataset.
    output_types: A list of `tf.DTypes` that has length `>= 1`.
    output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
    container: An optional `string`. Defaults to `""`.
    shared_name: An optional `string`. Defaults to `""`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `resource`.
  """
  _ctx = _context._context
  if _ctx is not None and _ctx._eager_context.is_eager:
    # Eager mode: attempt the C fast path, falling back to the Python slow
    # path when the fast path rejects the inputs.
    try:
      return _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._eager_context.device_name,
        "OneShotIterator", name, _ctx._post_execution_callbacks,
        "dataset_factory", dataset_factory, "output_types", output_types,
        "output_shapes", output_shapes, "container", container, "shared_name",
        shared_name)
    except _core._FallbackException:
      return one_shot_iterator_eager_fallback(
          dataset_factory=dataset_factory, output_types=output_types,
          output_shapes=output_shapes, container=container,
          shared_name=shared_name, name=name, ctx=_ctx)
    except _core._NotOkStatusException as e:
      message = e.message if name is None else e.message + " name: " + name
      _six.raise_from(_core._status_to_exception(e.code, message), None)
  # Graph mode: canonicalize attrs and build the op node.
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'one_shot_iterator' Op, not %r." % output_types)
  output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'one_shot_iterator' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  container = _execute.make_str("" if container is None else container,
                                "container")
  shared_name = _execute.make_str("" if shared_name is None else shared_name,
                                  "shared_name")
  _, _, _graph_op = _op_def_lib._apply_op_helper(
      "OneShotIterator", dataset_factory=dataset_factory,
      output_types=output_types, output_shapes=output_shapes,
      container=container, shared_name=shared_name, name=name)
  _outputs = _graph_op.outputs[:]
  _attrs = ("dataset_factory", _graph_op.get_attr("dataset_factory"),
            "output_types", _graph_op.get_attr("output_types"),
            "output_shapes", _graph_op.get_attr("output_shapes"),
            "container", _graph_op.get_attr("container"),
            "shared_name", _graph_op.get_attr("shared_name"))
  _execute.record_gradient(
      "OneShotIterator", _graph_op.inputs, _attrs, _outputs, name)
  handle, = _outputs
  return handle
3600 
3601 
def one_shot_iterator_eager_fallback(dataset_factory, output_types, output_shapes, container="", shared_name="", name=None, ctx=None):
  r"""Slow-path eager executor for the OneShotIterator op.

  Validates and canonicalizes the attr values, then executes the op via
  `_execute.execute` instead of the C fast path.
  """
  _ctx = ctx or _context.context()

  def _require_list(value, arg_name):
    # List-valued attrs must be lists or tuples.
    if not isinstance(value, (list, tuple)):
      raise TypeError(
          "Expected list for '%s' argument to "
          "'one_shot_iterator' Op, not %r." % (arg_name, value))

  _require_list(output_types, "output_types")
  output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
  _require_list(output_shapes, "output_shapes")
  output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  container = _execute.make_str("" if container is None else container,
                                "container")
  shared_name = _execute.make_str("" if shared_name is None else shared_name,
                                  "shared_name")
  # The op takes no tensor inputs; everything is carried in attrs.
  _inputs_flat = []
  _attrs = ("dataset_factory", dataset_factory, "output_types", output_types,
            "output_shapes", output_shapes, "container", container,
            "shared_name", shared_name)
  _result = _execute.execute(b"OneShotIterator", 1, inputs=_inputs_flat,
                             attrs=_attrs, ctx=_ctx, name=name)
  _execute.record_gradient(
      "OneShotIterator", _inputs_flat, _attrs, _result, name)
  handle, = _result
  return handle
3633 
3634 
def optimize_dataset(input_dataset, optimizations, output_types, output_shapes, name=None):
  r"""Creates a dataset by applying optimizations to `input_dataset`.

  Args:
    input_dataset: A `Tensor` of type `variant`.
      A variant tensor representing the input dataset.
    optimizations: A `Tensor` of type `string`.
      A `tf.string` vector `tf.Tensor` identifying optimizations to use.
    output_types: A list of `tf.DTypes` that has length `>= 1`.
    output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `variant`.
  """
  _ctx = _context._context
  if _ctx is not None and _ctx._eager_context.is_eager:
    # Eager mode: C fast path first, then the Python slow path on fallback.
    try:
      return _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._eager_context.device_name,
        "OptimizeDataset", name, _ctx._post_execution_callbacks,
        input_dataset, optimizations, "output_types", output_types,
        "output_shapes", output_shapes)
    except _core._FallbackException:
      return optimize_dataset_eager_fallback(
          input_dataset, optimizations, output_types=output_types,
          output_shapes=output_shapes, name=name, ctx=_ctx)
    except _core._NotOkStatusException as e:
      message = e.message if name is None else e.message + " name: " + name
      _six.raise_from(_core._status_to_exception(e.code, message), None)
  # Graph mode: validate attrs and add the op node to the current graph.
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'optimize_dataset' Op, not %r." % output_types)
  output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'optimize_dataset' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  _, _, _graph_op = _op_def_lib._apply_op_helper(
      "OptimizeDataset", input_dataset=input_dataset,
      optimizations=optimizations, output_types=output_types,
      output_shapes=output_shapes, name=name)
  _outputs = _graph_op.outputs[:]
  _attrs = ("output_types", _graph_op.get_attr("output_types"),
            "output_shapes", _graph_op.get_attr("output_shapes"))
  _execute.record_gradient(
      "OptimizeDataset", _graph_op.inputs, _attrs, _outputs, name)
  handle, = _outputs
  return handle
3695 
3696 
def optimize_dataset_eager_fallback(input_dataset, optimizations, output_types, output_shapes, name=None, ctx=None):
  r"""Slow-path eager executor for the OptimizeDataset op.

  Validates the attrs, converts the inputs to tensors of their declared
  dtypes, and runs the op via `_execute.execute`.
  """
  _ctx = ctx or _context.context()

  def _require_list(value, arg_name):
    # List-valued attrs must be lists or tuples.
    if not isinstance(value, (list, tuple)):
      raise TypeError(
          "Expected list for '%s' argument to "
          "'optimize_dataset' Op, not %r." % (arg_name, value))

  _require_list(output_types, "output_types")
  output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
  _require_list(output_shapes, "output_shapes")
  output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  _inputs_flat = [
      _ops.convert_to_tensor(input_dataset, _dtypes.variant),
      _ops.convert_to_tensor(optimizations, _dtypes.string),
  ]
  _attrs = ("output_types", output_types, "output_shapes", output_shapes)
  _result = _execute.execute(b"OptimizeDataset", 1, inputs=_inputs_flat,
                             attrs=_attrs, ctx=_ctx, name=name)
  _execute.record_gradient(
      "OptimizeDataset", _inputs_flat, _attrs, _result, name)
  handle, = _result
  return handle
3722 
3723 
def optional_from_value(components, name=None):
  r"""Constructs an Optional variant from a tuple of tensors.

  Args:
    components: A list of `Tensor` objects.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `variant`.
  """
  _ctx = _context._context
  if _ctx is not None and _ctx._eager_context.is_eager:
    # Eager mode: C fast path first, then the Python slow path on fallback.
    try:
      return _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._eager_context.device_name,
        "OptionalFromValue", name, _ctx._post_execution_callbacks, components)
    except _core._FallbackException:
      return optional_from_value_eager_fallback(
          components, name=name, ctx=_ctx)
    except _core._NotOkStatusException as e:
      message = e.message if name is None else e.message + " name: " + name
      _six.raise_from(_core._status_to_exception(e.code, message), None)
  # Graph mode: the component dtypes are inferred by the op helper and
  # recorded in the Toutput_types attr.
  _, _, _graph_op = _op_def_lib._apply_op_helper(
      "OptionalFromValue", components=components, name=name)
  _outputs = _graph_op.outputs[:]
  _attrs = ("Toutput_types", _graph_op.get_attr("Toutput_types"))
  _execute.record_gradient(
      "OptionalFromValue", _graph_op.inputs, _attrs, _outputs, name)
  optional, = _outputs
  return optional
3761 
3762 
def optional_from_value_eager_fallback(components, name=None, ctx=None):
  r"""Slow-path eager executor for the OptionalFromValue op.

  Converts `components` to eager tensors (inferring their dtypes for the
  Toutput_types attr) and runs the op via `_execute.execute`.
  """
  _ctx = ctx or _context.context()
  _attr_Toutput_types, components = _execute.convert_to_mixed_eager_tensors(components, _ctx)
  _inputs_flat = list(components)
  _attrs = ("Toutput_types", _attr_Toutput_types)
  _result = _execute.execute(b"OptionalFromValue", 1, inputs=_inputs_flat,
                             attrs=_attrs, ctx=_ctx, name=name)
  _execute.record_gradient(
      "OptionalFromValue", _inputs_flat, _attrs, _result, name)
  optional, = _result
  return optional
3777 
3778 
def optional_get_value(optional, output_types, output_shapes, name=None):
  r"""Returns the value stored in an Optional variant or raises an error if none exists.

  Args:
    optional: A `Tensor` of type `variant`.
    output_types: A list of `tf.DTypes` that has length `>= 1`.
    output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
    name: A name for the operation (optional).

  Returns:
    A list of `Tensor` objects of type `output_types`.
  """
  _ctx = _context._context
  if _ctx is not None and _ctx._eager_context.is_eager:
    # Eager mode: C fast path first, then the Python slow path on fallback.
    try:
      return _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._eager_context.device_name,
        "OptionalGetValue", name, _ctx._post_execution_callbacks, optional,
        "output_types", output_types, "output_shapes", output_shapes)
    except _core._FallbackException:
      return optional_get_value_eager_fallback(
          optional, output_types=output_types, output_shapes=output_shapes,
          name=name, ctx=_ctx)
    except _core._NotOkStatusException as e:
      message = e.message if name is None else e.message + " name: " + name
      _six.raise_from(_core._status_to_exception(e.code, message), None)
  # Graph mode: validate attrs and add the op node to the current graph.
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'optional_get_value' Op, not %r." % output_types)
  output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'optional_get_value' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  _, _, _graph_op = _op_def_lib._apply_op_helper(
      "OptionalGetValue", optional=optional, output_types=output_types,
      output_shapes=output_shapes, name=name)
  _outputs = _graph_op.outputs[:]
  _attrs = ("output_types", _graph_op.get_attr("output_types"),
            "output_shapes", _graph_op.get_attr("output_shapes"))
  _execute.record_gradient(
      "OptionalGetValue", _graph_op.inputs, _attrs, _outputs, name)
  return _outputs
3831 
3832 
def optional_get_value_eager_fallback(optional, output_types, output_shapes, name=None, ctx=None):
  r"""Slow-path eager executor for optional_get_value.

  Runs the OptionalGetValue op through _execute.execute when the fast C
  path raised a fallback exception.
  """
  _exec_ctx = ctx or _context.context()
  # Validate and canonicalize attr lists exactly as the fast path expects.
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'optional_get_value' Op, not %r." % output_types)
  output_types = [_execute.make_type(_dt, "output_types")
                  for _dt in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'optional_get_value' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_sh, "output_shapes")
                   for _sh in output_shapes]
  # Single variant input holding the Optional.
  optional = _ops.convert_to_tensor(optional, _dtypes.variant)
  _flat_inputs = [optional]
  _op_attrs = ("output_types", output_types, "output_shapes", output_shapes)
  # One output tensor per declared component type.
  _results = _execute.execute(b"OptionalGetValue", len(output_types),
                              inputs=_flat_inputs, attrs=_op_attrs,
                              ctx=_exec_ctx, name=name)
  _execute.record_gradient(
      "OptionalGetValue", _flat_inputs, _op_attrs, _results, name)
  return _results
3857 
3858 
def optional_has_value(optional, name=None):
  r"""Returns true if and only if the given Optional variant has a value.

  Args:
    optional: A `Tensor` of type `variant`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `bool`.
  """
  _ctx = _context._context
  # Graph mode (or no eager context yet): construct an OptionalHasValue node.
  if _ctx is None or not _ctx._eager_context.is_eager:
    _, _, _op = _op_def_lib._apply_op_helper(
        "OptionalHasValue", optional=optional, name=name)
    _result = _op.outputs[:]
    _inputs_flat = _op.inputs
    # This op declares no attrs.
    _attrs = None
    _execute.record_gradient(
      "OptionalHasValue", _inputs_flat, _attrs, _result, name)
    # Single output: unpack from the one-element list.
    _result, = _result
    return _result

  else:
    # Eager mode: attempt the fast C execution path first.
    try:
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._eager_context.device_name,
        "OptionalHasValue", name, _ctx._post_execution_callbacks, optional)
      return _result
    except _core._FallbackException:
      # Fast path unavailable for these inputs; use the Python slow path.
      return optional_has_value_eager_fallback(
          optional, name=name, ctx=_ctx)
    except _core._NotOkStatusException as e:
      # Surface the C++ status as a Python exception with the op name appended.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
3896 
3897 
def optional_has_value_eager_fallback(optional, name=None, ctx=None):
  r"""Slow-path eager executor for optional_has_value.

  Runs the OptionalHasValue op through _execute.execute when the fast C
  path raised a fallback exception.
  """
  _exec_ctx = ctx or _context.context()
  optional = _ops.convert_to_tensor(optional, _dtypes.variant)
  _flat_inputs = [optional]
  # This op declares no attrs.
  _op_attrs = None
  _results = _execute.execute(b"OptionalHasValue", 1, inputs=_flat_inputs,
                              attrs=_op_attrs, ctx=_exec_ctx, name=name)
  _execute.record_gradient(
      "OptionalHasValue", _flat_inputs, _op_attrs, _results, name)
  _has_value, = _results
  return _has_value
3912 
3913 
def optional_none(name=None):
  r"""Creates an Optional variant with no value.

  Args:
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `variant`.
  """
  _ctx = _context._context
  # Graph mode (or no eager context yet): construct an OptionalNone node.
  if _ctx is None or not _ctx._eager_context.is_eager:
    _, _, _op = _op_def_lib._apply_op_helper(
        "OptionalNone", name=name)
    _result = _op.outputs[:]
    _inputs_flat = _op.inputs
    # This op has no inputs of interest for gradients and no attrs.
    _attrs = None
    _execute.record_gradient(
      "OptionalNone", _inputs_flat, _attrs, _result, name)
    # Single output: unpack from the one-element list.
    _result, = _result
    return _result

  else:
    # Eager mode: attempt the fast C execution path first.
    try:
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._eager_context.device_name, "OptionalNone",
        name, _ctx._post_execution_callbacks)
      return _result
    except _core._FallbackException:
      # Fast path unavailable; use the Python slow path.
      return optional_none_eager_fallback(
          name=name, ctx=_ctx)
    except _core._NotOkStatusException as e:
      # Surface the C++ status as a Python exception with the op name appended.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
3950 
3951 
def optional_none_eager_fallback(name=None, ctx=None):
  r"""Slow-path eager executor for optional_none.

  Runs the OptionalNone op through _execute.execute when the fast C path
  raised a fallback exception.
  """
  _exec_ctx = ctx or _context.context()
  # OptionalNone takes no inputs and declares no attrs.
  _flat_inputs = []
  _op_attrs = None
  _results = _execute.execute(b"OptionalNone", 1, inputs=_flat_inputs,
                              attrs=_op_attrs, ctx=_exec_ctx, name=name)
  _execute.record_gradient(
      "OptionalNone", _flat_inputs, _op_attrs, _results, name)
  _optional, = _results
  return _optional
3965 
3966 
def padded_batch_dataset(input_dataset, batch_size, padded_shapes, padding_values, output_shapes, name=None):
  r"""Creates a dataset that batches and pads `batch_size` elements from the input.

  Args:
    input_dataset: A `Tensor` of type `variant`.
    batch_size: A `Tensor` of type `int64`.
      A scalar representing the number of elements to accumulate in a
      batch.
    padded_shapes: A list of at least 1 `Tensor` objects with type `int64`.
      A list of int64 tensors representing the desired padded shapes
      of the corresponding output components. These shapes may be partially
      specified, using `-1` to indicate that a particular dimension should be
      padded to the maximum size of all batch elements.
    padding_values: A list of `Tensor` objects.
      A list of scalars containing the padding value to use for
      each of the outputs.
    output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `variant`.
  """
  _ctx = _context._context
  # Graph mode (or no eager context yet): construct a PaddedBatchDataset node.
  if _ctx is None or not _ctx._eager_context.is_eager:
    if not isinstance(padded_shapes, (list, tuple)):
      raise TypeError(
          "Expected list for 'padded_shapes' argument to "
          "'padded_batch_dataset' Op, not %r." % padded_shapes)
    # N is inferred from padded_shapes; _apply_op_helper derives it again, so
    # this local is only kept for parity with the fallback path.
    _attr_N = len(padded_shapes)
    if not isinstance(output_shapes, (list, tuple)):
      raise TypeError(
          "Expected list for 'output_shapes' argument to "
          "'padded_batch_dataset' Op, not %r." % output_shapes)
    output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
    # Toutput_types is inferred from the dtypes of padding_values.
    _, _, _op = _op_def_lib._apply_op_helper(
        "PaddedBatchDataset", input_dataset=input_dataset,
        batch_size=batch_size, padded_shapes=padded_shapes,
        padding_values=padding_values, output_shapes=output_shapes, name=name)
    _result = _op.outputs[:]
    _inputs_flat = _op.inputs
    _attrs = ("Toutput_types", _op.get_attr("Toutput_types"), "output_shapes",
              _op.get_attr("output_shapes"), "N", _op.get_attr("N"))
    _execute.record_gradient(
      "PaddedBatchDataset", _inputs_flat, _attrs, _result, name)
    # Single variant output: unpack from the one-element list.
    _result, = _result
    return _result

  else:
    # Eager mode: attempt the fast C execution path first.
    try:
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._eager_context.device_name,
        "PaddedBatchDataset", name, _ctx._post_execution_callbacks,
        input_dataset, batch_size, padded_shapes, padding_values,
        "output_shapes", output_shapes)
      return _result
    except _core._FallbackException:
      # Fast path unavailable for these inputs; use the Python slow path.
      return padded_batch_dataset_eager_fallback(
          input_dataset, batch_size, padded_shapes, padding_values,
          output_shapes=output_shapes, name=name, ctx=_ctx)
    except _core._NotOkStatusException as e:
      # Surface the C++ status as a Python exception with the op name appended.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
4032 
4033 
def padded_batch_dataset_eager_fallback(input_dataset, batch_size, padded_shapes, padding_values, output_shapes, name=None, ctx=None):
  r"""Slow-path eager executor for padded_batch_dataset.

  Runs the PaddedBatchDataset op through _execute.execute when the fast C
  path raised a fallback exception.
  """
  _exec_ctx = ctx or _context.context()
  if not isinstance(padded_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'padded_shapes' argument to "
        "'padded_batch_dataset' Op, not %r." % padded_shapes)
  # N attr is the number of padded-shape tensors.
  _attr_N = len(padded_shapes)
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'padded_batch_dataset' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_sh, "output_shapes")
                   for _sh in output_shapes]
  # Toutput_types is inferred from the (possibly mixed) padding_values dtypes.
  _attr_Toutput_types, padding_values = _execute.convert_to_mixed_eager_tensors(
      padding_values, _exec_ctx)
  input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
  batch_size = _ops.convert_to_tensor(batch_size, _dtypes.int64)
  padded_shapes = _ops.convert_n_to_tensor(padded_shapes, _dtypes.int64)
  _flat_inputs = ([input_dataset, batch_size]
                  + list(padded_shapes) + list(padding_values))
  _op_attrs = ("Toutput_types", _attr_Toutput_types,
               "output_shapes", output_shapes, "N", _attr_N)
  _results = _execute.execute(b"PaddedBatchDataset", 1, inputs=_flat_inputs,
                              attrs=_op_attrs, ctx=_exec_ctx, name=name)
  _execute.record_gradient(
      "PaddedBatchDataset", _flat_inputs, _op_attrs, _results, name)
  _dataset, = _results
  return _dataset
4062 
4063 
def padded_batch_dataset_v2(input_dataset, batch_size, padded_shapes, padding_values, drop_remainder, output_shapes, name=None):
  r"""Creates a dataset that batches and pads `batch_size` elements from the input.

  Args:
    input_dataset: A `Tensor` of type `variant`.
    batch_size: A `Tensor` of type `int64`.
      A scalar representing the number of elements to accumulate in a
      batch.
    padded_shapes: A list of at least 1 `Tensor` objects with type `int64`.
      A list of int64 tensors representing the desired padded shapes
      of the corresponding output components. These shapes may be partially
      specified, using `-1` to indicate that a particular dimension should be
      padded to the maximum size of all batch elements.
    padding_values: A list of `Tensor` objects.
      A list of scalars containing the padding value to use for
      each of the outputs.
    drop_remainder: A `Tensor` of type `bool`.
      A scalar representing whether the last batch should be dropped in case its size
      is smaller than desired.
    output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `variant`.
  """
  _ctx = _context._context
  # Graph mode (or no eager context yet): construct a PaddedBatchDatasetV2
  # node.  V2 differs from V1 only by the extra drop_remainder input.
  if _ctx is None or not _ctx._eager_context.is_eager:
    if not isinstance(padded_shapes, (list, tuple)):
      raise TypeError(
          "Expected list for 'padded_shapes' argument to "
          "'padded_batch_dataset_v2' Op, not %r." % padded_shapes)
    # N is re-derived by _apply_op_helper; kept for parity with the fallback.
    _attr_N = len(padded_shapes)
    if not isinstance(output_shapes, (list, tuple)):
      raise TypeError(
          "Expected list for 'output_shapes' argument to "
          "'padded_batch_dataset_v2' Op, not %r." % output_shapes)
    output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
    _, _, _op = _op_def_lib._apply_op_helper(
        "PaddedBatchDatasetV2", input_dataset=input_dataset,
        batch_size=batch_size, padded_shapes=padded_shapes,
        padding_values=padding_values, drop_remainder=drop_remainder,
        output_shapes=output_shapes, name=name)
    _result = _op.outputs[:]
    _inputs_flat = _op.inputs
    _attrs = ("Toutput_types", _op.get_attr("Toutput_types"), "output_shapes",
              _op.get_attr("output_shapes"), "N", _op.get_attr("N"))
    _execute.record_gradient(
      "PaddedBatchDatasetV2", _inputs_flat, _attrs, _result, name)
    # Single variant output: unpack from the one-element list.
    _result, = _result
    return _result

  else:
    # Eager mode: attempt the fast C execution path first.
    try:
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._eager_context.device_name,
        "PaddedBatchDatasetV2", name, _ctx._post_execution_callbacks,
        input_dataset, batch_size, padded_shapes, padding_values,
        drop_remainder, "output_shapes", output_shapes)
      return _result
    except _core._FallbackException:
      # Fast path unavailable for these inputs; use the Python slow path.
      return padded_batch_dataset_v2_eager_fallback(
          input_dataset, batch_size, padded_shapes, padding_values,
          drop_remainder, output_shapes=output_shapes, name=name, ctx=_ctx)
    except _core._NotOkStatusException as e:
      # Surface the C++ status as a Python exception with the op name appended.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
4133 
4134 
def padded_batch_dataset_v2_eager_fallback(input_dataset, batch_size, padded_shapes, padding_values, drop_remainder, output_shapes, name=None, ctx=None):
  r"""Slow-path eager executor for padded_batch_dataset_v2.

  Runs the PaddedBatchDatasetV2 op through _execute.execute when the fast
  C path raised a fallback exception.
  """
  _exec_ctx = ctx or _context.context()
  if not isinstance(padded_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'padded_shapes' argument to "
        "'padded_batch_dataset_v2' Op, not %r." % padded_shapes)
  # N attr is the number of padded-shape tensors.
  _attr_N = len(padded_shapes)
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'padded_batch_dataset_v2' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_sh, "output_shapes")
                   for _sh in output_shapes]
  # Toutput_types is inferred from the (possibly mixed) padding_values dtypes.
  _attr_Toutput_types, padding_values = _execute.convert_to_mixed_eager_tensors(
      padding_values, _exec_ctx)
  input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
  batch_size = _ops.convert_to_tensor(batch_size, _dtypes.int64)
  padded_shapes = _ops.convert_n_to_tensor(padded_shapes, _dtypes.int64)
  drop_remainder = _ops.convert_to_tensor(drop_remainder, _dtypes.bool)
  _flat_inputs = ([input_dataset, batch_size] + list(padded_shapes)
                  + list(padding_values) + [drop_remainder])
  _op_attrs = ("Toutput_types", _attr_Toutput_types,
               "output_shapes", output_shapes, "N", _attr_N)
  _results = _execute.execute(b"PaddedBatchDatasetV2", 1, inputs=_flat_inputs,
                              attrs=_op_attrs, ctx=_exec_ctx, name=name)
  _execute.record_gradient(
      "PaddedBatchDatasetV2", _flat_inputs, _op_attrs, _results, name)
  _dataset, = _results
  return _dataset
4164 
4165 
def parallel_interleave_dataset(input_dataset, other_arguments, cycle_length, block_length, sloppy, buffer_output_elements, prefetch_input_elements, f, output_types, output_shapes, name=None):
  r"""Creates a dataset that applies `f` to the outputs of `input_dataset`.

  The resulting dataset is similar to the `InterleaveDataset`, with the exception
  that if retrieving the next value from a dataset would cause the requester to
  block, it will skip that input dataset. This dataset is especially useful
  when loading data from a variable-latency datastores (e.g. HDFS, GCS), as it
  allows the training step to proceed so long as some data is available.

  !! WARNING !! This dataset is not deterministic!

  Args:
    input_dataset: A `Tensor` of type `variant`.
    other_arguments: A list of `Tensor` objects.
    cycle_length: A `Tensor` of type `int64`.
    block_length: A `Tensor` of type `int64`.
    sloppy: A `Tensor` of type `bool`.
    buffer_output_elements: A `Tensor` of type `int64`.
    prefetch_input_elements: A `Tensor` of type `int64`.
    f: A function decorated with @Defun.
      A function mapping elements of `input_dataset`, concatenated with
      `other_arguments`, to a Dataset variant that contains elements matching
      `output_types` and `output_shapes`.
    output_types: A list of `tf.DTypes` that has length `>= 1`.
    output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `variant`.
  """
  _ctx = _context._context
  # Graph mode (or no eager context yet): construct a
  # ParallelInterleaveDataset node.
  if _ctx is None or not _ctx._eager_context.is_eager:
    # Validate and canonicalize the attr lists before building the op.
    if not isinstance(output_types, (list, tuple)):
      raise TypeError(
          "Expected list for 'output_types' argument to "
          "'parallel_interleave_dataset' Op, not %r." % output_types)
    output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
    if not isinstance(output_shapes, (list, tuple)):
      raise TypeError(
          "Expected list for 'output_shapes' argument to "
          "'parallel_interleave_dataset' Op, not %r." % output_shapes)
    output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
    # Targuments is inferred from the dtypes of other_arguments.
    _, _, _op = _op_def_lib._apply_op_helper(
        "ParallelInterleaveDataset", input_dataset=input_dataset,
        other_arguments=other_arguments, cycle_length=cycle_length,
        block_length=block_length, sloppy=sloppy,
        buffer_output_elements=buffer_output_elements,
        prefetch_input_elements=prefetch_input_elements, f=f,
        output_types=output_types, output_shapes=output_shapes, name=name)
    _result = _op.outputs[:]
    _inputs_flat = _op.inputs
    _attrs = ("f", _op.get_attr("f"), "Targuments",
              _op.get_attr("Targuments"), "output_types",
              _op.get_attr("output_types"), "output_shapes",
              _op.get_attr("output_shapes"))
    _execute.record_gradient(
      "ParallelInterleaveDataset", _inputs_flat, _attrs, _result, name)
    # Single variant output: unpack from the one-element list.
    _result, = _result
    return _result

  else:
    # Eager mode: attempt the fast C execution path first.
    try:
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._eager_context.device_name,
        "ParallelInterleaveDataset", name, _ctx._post_execution_callbacks,
        input_dataset, other_arguments, cycle_length, block_length, sloppy,
        buffer_output_elements, prefetch_input_elements, "f", f,
        "output_types", output_types, "output_shapes", output_shapes)
      return _result
    except _core._FallbackException:
      # Fast path unavailable for these inputs; use the Python slow path.
      return parallel_interleave_dataset_eager_fallback(
          input_dataset, other_arguments, cycle_length, block_length, sloppy,
          buffer_output_elements, prefetch_input_elements, f=f,
          output_types=output_types, output_shapes=output_shapes, name=name,
          ctx=_ctx)
    except _core._NotOkStatusException as e:
      # Surface the C++ status as a Python exception with the op name appended.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
4247 
4248 
def parallel_interleave_dataset_eager_fallback(input_dataset, other_arguments, cycle_length, block_length, sloppy, buffer_output_elements, prefetch_input_elements, f, output_types, output_shapes, name=None, ctx=None):
  r"""Slow-path eager executor for parallel_interleave_dataset.

  Runs the ParallelInterleaveDataset op through _execute.execute when the
  fast C path raised a fallback exception.
  """
  _exec_ctx = ctx or _context.context()
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'parallel_interleave_dataset' Op, not %r." % output_types)
  output_types = [_execute.make_type(_dt, "output_types")
                  for _dt in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'parallel_interleave_dataset' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_sh, "output_shapes")
                   for _sh in output_shapes]
  # Targuments is inferred from the (possibly mixed) other_arguments dtypes.
  _attr_Targuments, other_arguments = _execute.convert_to_mixed_eager_tensors(
      other_arguments, _exec_ctx)
  input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
  cycle_length = _ops.convert_to_tensor(cycle_length, _dtypes.int64)
  block_length = _ops.convert_to_tensor(block_length, _dtypes.int64)
  sloppy = _ops.convert_to_tensor(sloppy, _dtypes.bool)
  buffer_output_elements = _ops.convert_to_tensor(buffer_output_elements,
                                                  _dtypes.int64)
  prefetch_input_elements = _ops.convert_to_tensor(prefetch_input_elements,
                                                   _dtypes.int64)
  _flat_inputs = [input_dataset] + list(other_arguments) + [
      cycle_length, block_length, sloppy, buffer_output_elements,
      prefetch_input_elements]
  _op_attrs = ("f", f, "Targuments", _attr_Targuments,
               "output_types", output_types, "output_shapes", output_shapes)
  _results = _execute.execute(b"ParallelInterleaveDataset", 1,
                              inputs=_flat_inputs, attrs=_op_attrs,
                              ctx=_exec_ctx, name=name)
  _execute.record_gradient(
      "ParallelInterleaveDataset", _flat_inputs, _op_attrs, _results, name)
  _dataset, = _results
  return _dataset
4281 
4282 
def parallel_interleave_dataset_v2(input_dataset, other_arguments, cycle_length, block_length, num_parallel_calls, f, output_types, output_shapes, name=None):
  r"""Creates a dataset that applies `f` to the outputs of `input_dataset`.

  Args:
    input_dataset: A `Tensor` of type `variant`.
    other_arguments: A list of `Tensor` objects.
    cycle_length: A `Tensor` of type `int64`.
    block_length: A `Tensor` of type `int64`.
    num_parallel_calls: A `Tensor` of type `int64`.
    f: A function decorated with @Defun.
      A function mapping elements of `input_dataset`, concatenated with
      `other_arguments`, to a Dataset variant that contains elements matching
      `output_types` and `output_shapes`.
    output_types: A list of `tf.DTypes` that has length `>= 1`.
    output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `variant`.
  """
  _ctx = _context._context
  # Graph mode (or no eager context yet): construct a
  # ParallelInterleaveDatasetV2 node.  V2 replaces V1's sloppy/buffering
  # inputs with a single num_parallel_calls input.
  if _ctx is None or not _ctx._eager_context.is_eager:
    # Validate and canonicalize the attr lists before building the op.
    if not isinstance(output_types, (list, tuple)):
      raise TypeError(
          "Expected list for 'output_types' argument to "
          "'parallel_interleave_dataset_v2' Op, not %r." % output_types)
    output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
    if not isinstance(output_shapes, (list, tuple)):
      raise TypeError(
          "Expected list for 'output_shapes' argument to "
          "'parallel_interleave_dataset_v2' Op, not %r." % output_shapes)
    output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
    # Targuments is inferred from the dtypes of other_arguments.
    _, _, _op = _op_def_lib._apply_op_helper(
        "ParallelInterleaveDatasetV2", input_dataset=input_dataset,
        other_arguments=other_arguments, cycle_length=cycle_length,
        block_length=block_length, num_parallel_calls=num_parallel_calls, f=f,
        output_types=output_types, output_shapes=output_shapes, name=name)
    _result = _op.outputs[:]
    _inputs_flat = _op.inputs
    _attrs = ("f", _op.get_attr("f"), "Targuments",
              _op.get_attr("Targuments"), "output_types",
              _op.get_attr("output_types"), "output_shapes",
              _op.get_attr("output_shapes"))
    _execute.record_gradient(
      "ParallelInterleaveDatasetV2", _inputs_flat, _attrs, _result, name)
    # Single variant output: unpack from the one-element list.
    _result, = _result
    return _result

  else:
    # Eager mode: attempt the fast C execution path first.
    try:
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._eager_context.device_name,
        "ParallelInterleaveDatasetV2", name, _ctx._post_execution_callbacks,
        input_dataset, other_arguments, cycle_length, block_length,
        num_parallel_calls, "f", f, "output_types", output_types,
        "output_shapes", output_shapes)
      return _result
    except _core._FallbackException:
      # Fast path unavailable for these inputs; use the Python slow path.
      return parallel_interleave_dataset_v2_eager_fallback(
          input_dataset, other_arguments, cycle_length, block_length,
          num_parallel_calls, f=f, output_types=output_types,
          output_shapes=output_shapes, name=name, ctx=_ctx)
    except _core._NotOkStatusException as e:
      # Surface the C++ status as a Python exception with the op name appended.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
4351 
4352 
def parallel_interleave_dataset_v2_eager_fallback(input_dataset, other_arguments, cycle_length, block_length, num_parallel_calls, f, output_types, output_shapes, name=None, ctx=None):
  r"""Slow-path eager executor for parallel_interleave_dataset_v2.

  Runs the ParallelInterleaveDatasetV2 op through _execute.execute when
  the fast C path raised a fallback exception.
  """
  _exec_ctx = ctx or _context.context()
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'parallel_interleave_dataset_v2' Op, not %r." % output_types)
  output_types = [_execute.make_type(_dt, "output_types")
                  for _dt in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'parallel_interleave_dataset_v2' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_sh, "output_shapes")
                   for _sh in output_shapes]
  # Targuments is inferred from the (possibly mixed) other_arguments dtypes.
  _attr_Targuments, other_arguments = _execute.convert_to_mixed_eager_tensors(
      other_arguments, _exec_ctx)
  input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
  cycle_length = _ops.convert_to_tensor(cycle_length, _dtypes.int64)
  block_length = _ops.convert_to_tensor(block_length, _dtypes.int64)
  num_parallel_calls = _ops.convert_to_tensor(num_parallel_calls,
                                              _dtypes.int64)
  _flat_inputs = [input_dataset] + list(other_arguments) + [
      cycle_length, block_length, num_parallel_calls]
  _op_attrs = ("f", f, "Targuments", _attr_Targuments,
               "output_types", output_types, "output_shapes", output_shapes)
  _results = _execute.execute(b"ParallelInterleaveDatasetV2", 1,
                              inputs=_flat_inputs, attrs=_op_attrs,
                              ctx=_exec_ctx, name=name)
  _execute.record_gradient(
      "ParallelInterleaveDatasetV2", _flat_inputs, _op_attrs, _results, name)
  _dataset, = _results
  return _dataset
4383 
4384 
def parallel_map_dataset(input_dataset, other_arguments, num_parallel_calls, f, output_types, output_shapes, use_inter_op_parallelism=True, name=None):
  r"""Creates a dataset that applies `f` to the outputs of `input_dataset`.

  Unlike a "MapDataset", which applies `f` sequentially, this dataset invokes up
  to `num_parallel_calls` copies of `f` in parallel.

  Args:
    input_dataset: A `Tensor` of type `variant`.
    other_arguments: A list of `Tensor` objects.
    num_parallel_calls: A `Tensor` of type `int32`.
      The number of concurrent invocations of `f` that process
      elements from `input_dataset` in parallel.
    f: A function decorated with @Defun.
    output_types: A list of `tf.DTypes` that has length `>= 1`.
    output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
    use_inter_op_parallelism: An optional `bool`. Defaults to `True`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `variant`.
  """
  _ctx = _context._context
  # Graph mode (or no eager context yet): construct a ParallelMapDataset node.
  if _ctx is None or not _ctx._eager_context.is_eager:
    # Validate and canonicalize the attr lists before building the op.
    if not isinstance(output_types, (list, tuple)):
      raise TypeError(
          "Expected list for 'output_types' argument to "
          "'parallel_map_dataset' Op, not %r." % output_types)
    output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
    if not isinstance(output_shapes, (list, tuple)):
      raise TypeError(
          "Expected list for 'output_shapes' argument to "
          "'parallel_map_dataset' Op, not %r." % output_shapes)
    output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
    # Apply the attr default when the caller passed None explicitly.
    if use_inter_op_parallelism is None:
      use_inter_op_parallelism = True
    use_inter_op_parallelism = _execute.make_bool(use_inter_op_parallelism, "use_inter_op_parallelism")
    _, _, _op = _op_def_lib._apply_op_helper(
        "ParallelMapDataset", input_dataset=input_dataset,
        other_arguments=other_arguments,
        num_parallel_calls=num_parallel_calls, f=f, output_types=output_types,
        output_shapes=output_shapes,
        use_inter_op_parallelism=use_inter_op_parallelism, name=name)
    _result = _op.outputs[:]
    _inputs_flat = _op.inputs
    _attrs = ("f", _op.get_attr("f"), "Targuments",
              _op.get_attr("Targuments"), "output_types",
              _op.get_attr("output_types"), "output_shapes",
              _op.get_attr("output_shapes"), "use_inter_op_parallelism",
              _op.get_attr("use_inter_op_parallelism"))
    _execute.record_gradient(
      "ParallelMapDataset", _inputs_flat, _attrs, _result, name)
    # Single variant output: unpack from the one-element list.
    _result, = _result
    return _result

  else:
    # Eager mode: attempt the fast C execution path first.
    try:
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._eager_context.device_name,
        "ParallelMapDataset", name, _ctx._post_execution_callbacks,
        input_dataset, other_arguments, num_parallel_calls, "f", f,
        "output_types", output_types, "output_shapes", output_shapes,
        "use_inter_op_parallelism", use_inter_op_parallelism)
      return _result
    except _core._FallbackException:
      # Fast path unavailable for these inputs; use the Python slow path.
      return parallel_map_dataset_eager_fallback(
          input_dataset, other_arguments, num_parallel_calls, f=f,
          output_types=output_types, output_shapes=output_shapes,
          use_inter_op_parallelism=use_inter_op_parallelism, name=name,
          ctx=_ctx)
    except _core._NotOkStatusException as e:
      # Surface the C++ status as a Python exception with the op name appended.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
4460 
4461 
def parallel_map_dataset_eager_fallback(input_dataset, other_arguments, num_parallel_calls, f, output_types, output_shapes, use_inter_op_parallelism=True, name=None, ctx=None):
  r"""Slow-path eager executor for the ParallelMapDataset op.

  Used when the fast C execution path raises `_FallbackException`; performs
  full argument validation and tensor conversion in Python, then dispatches
  through `_execute.execute`.
  """
  _ctx = ctx or _context.context()
  # List-valued attrs must arrive as lists/tuples; canonicalize each element.
  if isinstance(output_types, (list, tuple)):
    output_types = [_execute.make_type(dt, "output_types") for dt in output_types]
  else:
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'parallel_map_dataset' Op, not %r." % output_types)
  if isinstance(output_shapes, (list, tuple)):
    output_shapes = [_execute.make_shape(shape, "output_shapes")
                     for shape in output_shapes]
  else:
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'parallel_map_dataset' Op, not %r." % output_shapes)
  if use_inter_op_parallelism is None:
    use_inter_op_parallelism = True
  use_inter_op_parallelism = _execute.make_bool(use_inter_op_parallelism,
                                                "use_inter_op_parallelism")
  # "Targuments" is inferred from the dtypes of the captured arguments.
  _attr_Targuments, other_arguments = _execute.convert_to_mixed_eager_tensors(
      other_arguments, _ctx)
  input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
  num_parallel_calls = _ops.convert_to_tensor(num_parallel_calls, _dtypes.int32)
  _inputs_flat = [input_dataset]
  _inputs_flat.extend(other_arguments)
  _inputs_flat.append(num_parallel_calls)
  _attrs = ("f", f, "Targuments", _attr_Targuments,
            "output_types", output_types, "output_shapes", output_shapes,
            "use_inter_op_parallelism", use_inter_op_parallelism)
  _result = _execute.execute(b"ParallelMapDataset", 1, inputs=_inputs_flat,
                             attrs=_attrs, ctx=_ctx, name=name)
  _execute.record_gradient(
      "ParallelMapDataset", _inputs_flat, _attrs, _result, name)
  (_result,) = _result
  return _result
4493 
4494 
def parse_example_dataset(input_dataset, num_parallel_calls, dense_defaults, sparse_keys, dense_keys, sparse_types, dense_shapes, output_types, output_shapes, name=None):
  r"""Transforms `input_dataset` containing `Example` protos as vectors of DT_STRING into a dataset of `Tensor` or `SparseTensor` objects representing the parsed features.

  Args:
    input_dataset: A `Tensor` of type `variant`.
    num_parallel_calls: A `Tensor` of type `int64`.
    dense_defaults: A list of `Tensor` objects with types from: `float32`, `int64`, `string`.
      A dict mapping string keys to `Tensor`s.
      The keys of the dict must match the dense_keys of the feature.
    sparse_keys: A list of `strings`.
      A list of string keys in the examples features.
      The results for these keys will be returned as `SparseTensor` objects.
    dense_keys: A list of `strings`.
      A list of Ndense string Tensors (scalars).
      The keys expected in the Examples features associated with dense values.
    sparse_types: A list of `tf.DTypes` from: `tf.float32, tf.int64, tf.string`.
      A list of `DTypes` of the same length as `sparse_keys`.
      Only `tf.float32` (`FloatList`), `tf.int64` (`Int64List`),
      and `tf.string` (`BytesList`) are supported.
    dense_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`).
      List of tuples with the same length as `dense_keys`.
      The shape of the data for each dense feature referenced by `dense_keys`.
      Required for any input tensors identified by `dense_keys`.  Must be
      either fully defined, or may contain an unknown first dimension.
      An unknown first dimension means the feature is treated as having
      a variable number of blocks, and the output shape along this dimension
      is considered unknown at graph build time.  Padding is applied for
      minibatch elements smaller than the maximum number of blocks for the
      given feature along this dimension.
    output_types: A list of `tf.DTypes` that has length `>= 1`.
      The type list for the return values.
    output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
      The list of shapes being produced.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `variant`.
  """
  _ctx = _context._context
  # Graph mode (or an eager context that has not been initialized yet):
  # build a symbolic op through the op-def library.
  if _ctx is None or not _ctx._eager_context.is_eager:
    # Validate and canonicalize every list-valued attribute up front so a
    # bad argument fails with a clear TypeError before op construction.
    if not isinstance(sparse_keys, (list, tuple)):
      raise TypeError(
          "Expected list for 'sparse_keys' argument to "
          "'parse_example_dataset' Op, not %r." % sparse_keys)
    sparse_keys = [_execute.make_str(_s, "sparse_keys") for _s in sparse_keys]
    if not isinstance(dense_keys, (list, tuple)):
      raise TypeError(
          "Expected list for 'dense_keys' argument to "
          "'parse_example_dataset' Op, not %r." % dense_keys)
    dense_keys = [_execute.make_str(_s, "dense_keys") for _s in dense_keys]
    if not isinstance(sparse_types, (list, tuple)):
      raise TypeError(
          "Expected list for 'sparse_types' argument to "
          "'parse_example_dataset' Op, not %r." % sparse_types)
    sparse_types = [_execute.make_type(_t, "sparse_types") for _t in sparse_types]
    if not isinstance(dense_shapes, (list, tuple)):
      raise TypeError(
          "Expected list for 'dense_shapes' argument to "
          "'parse_example_dataset' Op, not %r." % dense_shapes)
    dense_shapes = [_execute.make_shape(_s, "dense_shapes") for _s in dense_shapes]
    if not isinstance(output_types, (list, tuple)):
      raise TypeError(
          "Expected list for 'output_types' argument to "
          "'parse_example_dataset' Op, not %r." % output_types)
    output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
    if not isinstance(output_shapes, (list, tuple)):
      raise TypeError(
          "Expected list for 'output_shapes' argument to "
          "'parse_example_dataset' Op, not %r." % output_shapes)
    output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
    _, _, _op = _op_def_lib._apply_op_helper(
        "ParseExampleDataset", input_dataset=input_dataset,
        num_parallel_calls=num_parallel_calls, dense_defaults=dense_defaults,
        sparse_keys=sparse_keys, dense_keys=dense_keys,
        sparse_types=sparse_types, dense_shapes=dense_shapes,
        output_types=output_types, output_shapes=output_shapes, name=name)
    _result = _op.outputs[:]
    _inputs_flat = _op.inputs
    # Read attrs back from the created op so the recorded values are the
    # canonical ones (e.g. "Tdense" is inferred from `dense_defaults` by the
    # op-def helper rather than passed in above).
    _attrs = ("sparse_keys", _op.get_attr("sparse_keys"), "dense_keys",
              _op.get_attr("dense_keys"), "sparse_types",
              _op.get_attr("sparse_types"), "Tdense", _op.get_attr("Tdense"),
              "dense_shapes", _op.get_attr("dense_shapes"), "output_types",
              _op.get_attr("output_types"), "output_shapes",
              _op.get_attr("output_shapes"))
    _execute.record_gradient(
      "ParseExampleDataset", _inputs_flat, _attrs, _result, name)
    _result, = _result
    return _result

  else:
    try:
      # Eager fast path: a single C call. Positional tensor inputs come
      # first, then attrs as interleaved name/value pairs; the order must
      # match the op definition.
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._eager_context.device_name,
        "ParseExampleDataset", name, _ctx._post_execution_callbacks,
        input_dataset, num_parallel_calls, dense_defaults, "sparse_keys",
        sparse_keys, "dense_keys", dense_keys, "sparse_types", sparse_types,
        "dense_shapes", dense_shapes, "output_types", output_types,
        "output_shapes", output_shapes)
      return _result
    except _core._FallbackException:
      # Fast path could not handle these arguments; retry through the
      # Python slow path, which performs full conversion and validation.
      return parse_example_dataset_eager_fallback(
          input_dataset, num_parallel_calls, dense_defaults,
          sparse_keys=sparse_keys, dense_keys=dense_keys,
          sparse_types=sparse_types, dense_shapes=dense_shapes,
          output_types=output_types, output_shapes=output_shapes, name=name,
          ctx=_ctx)
    except _core._NotOkStatusException as e:
      # Translate a C-level error status into the matching Python exception,
      # tagging the message with the op name when one was given.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
4607 
4608 
def parse_example_dataset_eager_fallback(input_dataset, num_parallel_calls, dense_defaults, sparse_keys, dense_keys, sparse_types, dense_shapes, output_types, output_shapes, name=None, ctx=None):
  r"""Slow-path eager executor for the ParseExampleDataset op.

  Used when the fast C execution path raises `_FallbackException`; performs
  full argument validation and tensor conversion in Python, then dispatches
  through `_execute.execute`.
  """
  _ctx = ctx or _context.context()

  def _require_list(value, attr_name):
    # Every attr handled below must arrive as a list or tuple.
    if not isinstance(value, (list, tuple)):
      raise TypeError(
          "Expected list for %r argument to "
          "'parse_example_dataset' Op, not %r." % (attr_name, value))
    return value

  sparse_keys = [_execute.make_str(key, "sparse_keys")
                 for key in _require_list(sparse_keys, "sparse_keys")]
  dense_keys = [_execute.make_str(key, "dense_keys")
                for key in _require_list(dense_keys, "dense_keys")]
  sparse_types = [_execute.make_type(dt, "sparse_types")
                  for dt in _require_list(sparse_types, "sparse_types")]
  dense_shapes = [_execute.make_shape(shape, "dense_shapes")
                  for shape in _require_list(dense_shapes, "dense_shapes")]
  output_types = [_execute.make_type(dt, "output_types")
                  for dt in _require_list(output_types, "output_types")]
  output_shapes = [_execute.make_shape(shape, "output_shapes")
                   for shape in _require_list(output_shapes, "output_shapes")]
  # "Tdense" is inferred from the dtypes of the supplied defaults.
  _attr_Tdense, dense_defaults = _execute.convert_to_mixed_eager_tensors(
      dense_defaults, _ctx)
  input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
  num_parallel_calls = _ops.convert_to_tensor(num_parallel_calls, _dtypes.int64)
  _inputs_flat = [input_dataset, num_parallel_calls] + list(dense_defaults)
  _attrs = ("sparse_keys", sparse_keys, "dense_keys", dense_keys,
            "sparse_types", sparse_types, "Tdense", _attr_Tdense,
            "dense_shapes", dense_shapes, "output_types", output_types,
            "output_shapes", output_shapes)
  _result = _execute.execute(b"ParseExampleDataset", 1, inputs=_inputs_flat,
                             attrs=_attrs, ctx=_ctx, name=name)
  _execute.record_gradient(
      "ParseExampleDataset", _inputs_flat, _attrs, _result, name)
  (_result,) = _result
  return _result
4657 
4658 
def prefetch_dataset(input_dataset, buffer_size, output_types, output_shapes, name=None):
  r"""Creates a dataset that asynchronously prefetches elements from `input_dataset`.

  Args:
    input_dataset: A `Tensor` of type `variant`.
    buffer_size: A `Tensor` of type `int64`.
      The maximum number of elements to buffer in an iterator over
      this dataset.
    output_types: A list of `tf.DTypes` that has length `>= 1`.
    output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `variant`.
  """
  _ctx = _context._context
  # Graph mode (or an eager context that has not been initialized yet):
  # build a symbolic op through the op-def library.
  if _ctx is None or not _ctx._eager_context.is_eager:
    # Validate and canonicalize the list-valued attributes up front.
    if not isinstance(output_types, (list, tuple)):
      raise TypeError(
          "Expected list for 'output_types' argument to "
          "'prefetch_dataset' Op, not %r." % output_types)
    output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
    if not isinstance(output_shapes, (list, tuple)):
      raise TypeError(
          "Expected list for 'output_shapes' argument to "
          "'prefetch_dataset' Op, not %r." % output_shapes)
    output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
    _, _, _op = _op_def_lib._apply_op_helper(
        "PrefetchDataset", input_dataset=input_dataset,
        buffer_size=buffer_size, output_types=output_types,
        output_shapes=output_shapes, name=name)
    _result = _op.outputs[:]
    _inputs_flat = _op.inputs
    # Read attrs back from the created op so the recorded values are the
    # canonical ones the op actually carries.
    _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes",
              _op.get_attr("output_shapes"))
    _execute.record_gradient(
      "PrefetchDataset", _inputs_flat, _attrs, _result, name)
    _result, = _result
    return _result

  else:
    try:
      # Eager fast path: a single C call; positional tensor inputs first,
      # then attrs as interleaved name/value pairs.
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._eager_context.device_name,
        "PrefetchDataset", name, _ctx._post_execution_callbacks,
        input_dataset, buffer_size, "output_types", output_types,
        "output_shapes", output_shapes)
      return _result
    except _core._FallbackException:
      # Fast path could not handle these arguments; retry through the
      # Python slow path, which performs full conversion and validation.
      return prefetch_dataset_eager_fallback(
          input_dataset, buffer_size, output_types=output_types,
          output_shapes=output_shapes, name=name, ctx=_ctx)
    except _core._NotOkStatusException as e:
      # Translate a C-level error status into the matching Python exception,
      # tagging the message with the op name when one was given.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
4717 
4718 
def prefetch_dataset_eager_fallback(input_dataset, buffer_size, output_types, output_shapes, name=None, ctx=None):
  r"""Slow-path eager executor for the PrefetchDataset op.

  Used when the fast C execution path raises `_FallbackException`.
  """
  _ctx = ctx or _context.context()
  # List-valued attrs must arrive as lists/tuples; canonicalize each element.
  if isinstance(output_types, (list, tuple)):
    output_types = [_execute.make_type(dt, "output_types") for dt in output_types]
  else:
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'prefetch_dataset' Op, not %r." % output_types)
  if isinstance(output_shapes, (list, tuple)):
    output_shapes = [_execute.make_shape(shape, "output_shapes")
                     for shape in output_shapes]
  else:
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'prefetch_dataset' Op, not %r." % output_shapes)
  # The op's two inputs have fixed dtypes: a variant dataset handle and an
  # int64 buffer size.
  _inputs_flat = [
      _ops.convert_to_tensor(input_dataset, _dtypes.variant),
      _ops.convert_to_tensor(buffer_size, _dtypes.int64),
  ]
  _attrs = ("output_types", output_types, "output_shapes", output_shapes)
  _result = _execute.execute(b"PrefetchDataset", 1, inputs=_inputs_flat,
                             attrs=_attrs, ctx=_ctx, name=name)
  _execute.record_gradient(
      "PrefetchDataset", _inputs_flat, _attrs, _result, name)
  (_result,) = _result
  return _result
4744 
4745 
def prepend_from_queue_and_padded_batch_dataset(input_dataset, batch_size, padded_shapes, padding_values, output_shapes, name=None):
  r"""TODO: add doc.

  NOTE(review): the op name suggests this pads and batches `input_dataset`
  while allowing elements to be prepended from a queue — confirm against the
  C++ kernel before relying on that description.

  Args:
    input_dataset: A `Tensor` of type `variant`.
    batch_size: A `Tensor` of type `int64`.
    padded_shapes: A list of at least 1 `Tensor` objects with type `int64`.
    padding_values: A list of `Tensor` objects.
    output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `variant`.
  """
  _ctx = _context._context
  # Graph mode (or an eager context that has not been initialized yet):
  # build a symbolic op through the op-def library.
  if _ctx is None or not _ctx._eager_context.is_eager:
    if not isinstance(padded_shapes, (list, tuple)):
      raise TypeError(
          "Expected list for 'padded_shapes' argument to "
          "'prepend_from_queue_and_padded_batch_dataset' Op, not %r." % padded_shapes)
    # The "N" attr is inferred from the number of padded-shape tensors.
    _attr_N = len(padded_shapes)
    if not isinstance(output_shapes, (list, tuple)):
      raise TypeError(
          "Expected list for 'output_shapes' argument to "
          "'prepend_from_queue_and_padded_batch_dataset' Op, not %r." % output_shapes)
    output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
    _, _, _op = _op_def_lib._apply_op_helper(
        "PrependFromQueueAndPaddedBatchDataset", input_dataset=input_dataset,
        batch_size=batch_size, padded_shapes=padded_shapes,
        padding_values=padding_values, output_shapes=output_shapes, name=name)
    _result = _op.outputs[:]
    _inputs_flat = _op.inputs
    # Read attrs back from the created op so the recorded values are the
    # canonical ones ("Toutput_types" is inferred from `padding_values`).
    _attrs = ("Toutput_types", _op.get_attr("Toutput_types"), "output_shapes",
              _op.get_attr("output_shapes"), "N", _op.get_attr("N"))
    _execute.record_gradient(
      "PrependFromQueueAndPaddedBatchDataset", _inputs_flat, _attrs, _result, name)
    _result, = _result
    return _result

  else:
    try:
      # Eager fast path: a single C call; positional tensor inputs first,
      # then attrs as interleaved name/value pairs.
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._eager_context.device_name,
        "PrependFromQueueAndPaddedBatchDataset", name,
        _ctx._post_execution_callbacks, input_dataset, batch_size,
        padded_shapes, padding_values, "output_shapes", output_shapes)
      return _result
    except _core._FallbackException:
      # Fast path could not handle these arguments; retry through the
      # Python slow path, which performs full conversion and validation.
      return prepend_from_queue_and_padded_batch_dataset_eager_fallback(
          input_dataset, batch_size, padded_shapes, padding_values,
          output_shapes=output_shapes, name=name, ctx=_ctx)
    except _core._NotOkStatusException as e:
      # Translate a C-level error status into the matching Python exception,
      # tagging the message with the op name when one was given.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
4803 
4804 
def prepend_from_queue_and_padded_batch_dataset_eager_fallback(input_dataset, batch_size, padded_shapes, padding_values, output_shapes, name=None, ctx=None):
  r"""Slow-path eager executor for the PrependFromQueueAndPaddedBatchDataset op.

  Used when the fast C execution path raises `_FallbackException`.
  """
  _ctx = ctx or _context.context()
  if not isinstance(padded_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'padded_shapes' argument to "
        "'prepend_from_queue_and_padded_batch_dataset' Op, not %r." % padded_shapes)
  # The "N" attr is inferred from how many padded-shape tensors were given.
  _attr_N = len(padded_shapes)
  if isinstance(output_shapes, (list, tuple)):
    output_shapes = [_execute.make_shape(shape, "output_shapes")
                     for shape in output_shapes]
  else:
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'prepend_from_queue_and_padded_batch_dataset' Op, not %r." % output_shapes)
  # "Toutput_types" is inferred from the dtypes of the padding values.
  _attr_Toutput_types, padding_values = _execute.convert_to_mixed_eager_tensors(
      padding_values, _ctx)
  input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
  batch_size = _ops.convert_to_tensor(batch_size, _dtypes.int64)
  padded_shapes = _ops.convert_n_to_tensor(padded_shapes, _dtypes.int64)
  _inputs_flat = [input_dataset, batch_size]
  _inputs_flat.extend(padded_shapes)
  _inputs_flat.extend(padding_values)
  _attrs = ("Toutput_types", _attr_Toutput_types,
            "output_shapes", output_shapes, "N", _attr_N)
  _result = _execute.execute(b"PrependFromQueueAndPaddedBatchDataset", 1,
                             inputs=_inputs_flat, attrs=_attrs, ctx=_ctx,
                             name=name)
  _execute.record_gradient(
      "PrependFromQueueAndPaddedBatchDataset", _inputs_flat, _attrs, _result, name)
  (_result,) = _result
  return _result
4834 
4835 
def random_dataset(seed, seed2, output_types, output_shapes, name=None):
  r"""Creates a Dataset that returns pseudorandom numbers.

  Args:
    seed: A `Tensor` of type `int64`.
      A scalar seed for the random number generator. If either seed or
      seed2 is set to be non-zero, the random number generator is seeded
      by the given seed.  Otherwise, a random seed is used.
    seed2: A `Tensor` of type `int64`.
      A second scalar seed to avoid seed collision.
    output_types: A list of `tf.DTypes` that has length `>= 1`.
    output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `variant`.
  """
  _ctx = _context._context
  # Graph mode (or an eager context that has not been initialized yet):
  # build a symbolic op through the op-def library.
  if _ctx is None or not _ctx._eager_context.is_eager:
    # Validate and canonicalize the list-valued attributes up front.
    if not isinstance(output_types, (list, tuple)):
      raise TypeError(
          "Expected list for 'output_types' argument to "
          "'random_dataset' Op, not %r." % output_types)
    output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
    if not isinstance(output_shapes, (list, tuple)):
      raise TypeError(
          "Expected list for 'output_shapes' argument to "
          "'random_dataset' Op, not %r." % output_shapes)
    output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
    _, _, _op = _op_def_lib._apply_op_helper(
        "RandomDataset", seed=seed, seed2=seed2, output_types=output_types,
        output_shapes=output_shapes, name=name)
    _result = _op.outputs[:]
    _inputs_flat = _op.inputs
    # Read attrs back from the created op so the recorded values are the
    # canonical ones the op actually carries.
    _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes",
              _op.get_attr("output_shapes"))
    _execute.record_gradient(
      "RandomDataset", _inputs_flat, _attrs, _result, name)
    _result, = _result
    return _result

  else:
    try:
      # Eager fast path: a single C call; positional tensor inputs first,
      # then attrs as interleaved name/value pairs.
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._eager_context.device_name,
        "RandomDataset", name, _ctx._post_execution_callbacks, seed, seed2,
        "output_types", output_types, "output_shapes", output_shapes)
      return _result
    except _core._FallbackException:
      # Fast path could not handle these arguments; retry through the
      # Python slow path, which performs full conversion and validation.
      return random_dataset_eager_fallback(
          seed, seed2, output_types=output_types, output_shapes=output_shapes,
          name=name, ctx=_ctx)
    except _core._NotOkStatusException as e:
      # Translate a C-level error status into the matching Python exception,
      # tagging the message with the op name when one was given.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
4894 
4895 
def random_dataset_eager_fallback(seed, seed2, output_types, output_shapes, name=None, ctx=None):
  r"""Slow-path eager executor for the RandomDataset op.

  Used when the fast C execution path raises `_FallbackException`.
  """
  _ctx = ctx or _context.context()
  # List-valued attrs must arrive as lists/tuples; canonicalize each element.
  if isinstance(output_types, (list, tuple)):
    output_types = [_execute.make_type(dt, "output_types") for dt in output_types]
  else:
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'random_dataset' Op, not %r." % output_types)
  if isinstance(output_shapes, (list, tuple)):
    output_shapes = [_execute.make_shape(shape, "output_shapes")
                     for shape in output_shapes]
  else:
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'random_dataset' Op, not %r." % output_shapes)
  # Both seeds are int64 scalars per the op definition.
  _inputs_flat = [
      _ops.convert_to_tensor(seed, _dtypes.int64),
      _ops.convert_to_tensor(seed2, _dtypes.int64),
  ]
  _attrs = ("output_types", output_types, "output_shapes", output_shapes)
  _result = _execute.execute(b"RandomDataset", 1, inputs=_inputs_flat,
                             attrs=_attrs, ctx=_ctx, name=name)
  _execute.record_gradient(
      "RandomDataset", _inputs_flat, _attrs, _result, name)
  (_result,) = _result
  return _result
4921 
4922 
def range_dataset(start, stop, step, output_types, output_shapes, name=None):
  r"""Creates a dataset with a range of values. Corresponds to python's xrange.

  Args:
    start: A `Tensor` of type `int64`.
      corresponds to start in python's xrange().
    stop: A `Tensor` of type `int64`.
      corresponds to stop in python's xrange().
    step: A `Tensor` of type `int64`.
      corresponds to step in python's xrange().
    output_types: A list of `tf.DTypes` that has length `>= 1`.
    output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `variant`.
  """
  _ctx = _context._context
  # Graph mode (or an eager context that has not been initialized yet):
  # build a symbolic op through the op-def library.
  if _ctx is None or not _ctx._eager_context.is_eager:
    # Validate and canonicalize the list-valued attributes up front.
    if not isinstance(output_types, (list, tuple)):
      raise TypeError(
          "Expected list for 'output_types' argument to "
          "'range_dataset' Op, not %r." % output_types)
    output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
    if not isinstance(output_shapes, (list, tuple)):
      raise TypeError(
          "Expected list for 'output_shapes' argument to "
          "'range_dataset' Op, not %r." % output_shapes)
    output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
    _, _, _op = _op_def_lib._apply_op_helper(
        "RangeDataset", start=start, stop=stop, step=step,
        output_types=output_types, output_shapes=output_shapes, name=name)
    _result = _op.outputs[:]
    _inputs_flat = _op.inputs
    # Read attrs back from the created op so the recorded values are the
    # canonical ones the op actually carries.
    _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes",
              _op.get_attr("output_shapes"))
    _execute.record_gradient(
      "RangeDataset", _inputs_flat, _attrs, _result, name)
    _result, = _result
    return _result

  else:
    try:
      # Eager fast path: a single C call; positional tensor inputs first,
      # then attrs as interleaved name/value pairs.
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._eager_context.device_name, "RangeDataset",
        name, _ctx._post_execution_callbacks, start, stop, step,
        "output_types", output_types, "output_shapes", output_shapes)
      return _result
    except _core._FallbackException:
      # Fast path could not handle these arguments; retry through the
      # Python slow path, which performs full conversion and validation.
      return range_dataset_eager_fallback(
          start, stop, step, output_types=output_types,
          output_shapes=output_shapes, name=name, ctx=_ctx)
    except _core._NotOkStatusException as e:
      # Translate a C-level error status into the matching Python exception,
      # tagging the message with the op name when one was given.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
4981 
4982 
def range_dataset_eager_fallback(start, stop, step, output_types, output_shapes, name=None, ctx=None):
  r"""Slow-path eager executor for the RangeDataset op.

  Used when the fast C execution path raises `_FallbackException`.
  """
  _ctx = ctx or _context.context()
  # List-valued attrs must arrive as lists/tuples; canonicalize each element.
  if isinstance(output_types, (list, tuple)):
    output_types = [_execute.make_type(dt, "output_types") for dt in output_types]
  else:
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'range_dataset' Op, not %r." % output_types)
  if isinstance(output_shapes, (list, tuple)):
    output_shapes = [_execute.make_shape(shape, "output_shapes")
                     for shape in output_shapes]
  else:
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'range_dataset' Op, not %r." % output_shapes)
  # All three range bounds are int64 scalars per the op definition.
  _inputs_flat = [
      _ops.convert_to_tensor(start, _dtypes.int64),
      _ops.convert_to_tensor(stop, _dtypes.int64),
      _ops.convert_to_tensor(step, _dtypes.int64),
  ]
  _attrs = ("output_types", output_types, "output_shapes", output_shapes)
  _result = _execute.execute(b"RangeDataset", 1, inputs=_inputs_flat,
                             attrs=_attrs, ctx=_ctx, name=name)
  _execute.record_gradient(
      "RangeDataset", _inputs_flat, _attrs, _result, name)
  (_result,) = _result
  return _result
5009 
5010 
def reduce_dataset(input_dataset, initial_state, other_arguments, f, output_types, output_shapes, use_inter_op_parallelism=True, name=None):
  r"""Reduces the input dataset to a singleton using a reduce function.

  Args:
    input_dataset: A `Tensor` of type `variant`.
      A variant tensor representing the input dataset.
    initial_state: A list of `Tensor` objects.
      A nested structure of tensors, representing the initial state of the
      transformation.
    other_arguments: A list of `Tensor` objects.
    f: A function decorated with @Defun.
      A function that maps `(old_state, input_element)` to `new_state`. It must take
      two arguments and return a nested structures of tensors. The structure of
      `new_state` must match the structure of `initial_state`.
    output_types: A list of `tf.DTypes` that has length `>= 1`.
    output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
    use_inter_op_parallelism: An optional `bool`. Defaults to `True`.
    name: A name for the operation (optional).

  Returns:
    A list of `Tensor` objects of type `output_types`.
  """
  _ctx = _context._context
  # Graph mode (no eager context, or eager disabled): normalize the attrs in
  # Python, then build a ReduceDataset node through the op-def library.
  if _ctx is None or not _ctx._eager_context.is_eager:
    if not isinstance(output_types, (list, tuple)):
      raise TypeError(
          "Expected list for 'output_types' argument to "
          "'reduce_dataset' Op, not %r." % output_types)
    output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
    if not isinstance(output_shapes, (list, tuple)):
      raise TypeError(
          "Expected list for 'output_shapes' argument to "
          "'reduce_dataset' Op, not %r." % output_shapes)
    output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
    if use_inter_op_parallelism is None:
      use_inter_op_parallelism = True
    use_inter_op_parallelism = _execute.make_bool(use_inter_op_parallelism, "use_inter_op_parallelism")
    _, _, _op = _op_def_lib._apply_op_helper(
        "ReduceDataset", input_dataset=input_dataset,
        initial_state=initial_state, other_arguments=other_arguments, f=f,
        output_types=output_types, output_shapes=output_shapes,
        use_inter_op_parallelism=use_inter_op_parallelism, name=name)
    _result = _op.outputs[:]
    _inputs_flat = _op.inputs
    # Read the attrs back from the created op: Tstate/Targuments are inferred
    # from the input tensors by the op-def library, not supplied by callers.
    _attrs = ("f", _op.get_attr("f"), "Tstate", _op.get_attr("Tstate"),
              "Targuments", _op.get_attr("Targuments"), "output_types",
              _op.get_attr("output_types"), "output_shapes",
              _op.get_attr("output_shapes"), "use_inter_op_parallelism",
              _op.get_attr("use_inter_op_parallelism"))
    _execute.record_gradient(
      "ReduceDataset", _inputs_flat, _attrs, _result, name)
    return _result

  else:
    # Eager mode: try the C fast path first. Its positional protocol (inputs,
    # then alternating attr name/value pairs) must not be reordered.
    try:
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._eager_context.device_name,
        "ReduceDataset", name, _ctx._post_execution_callbacks, input_dataset,
        initial_state, other_arguments, "f", f, "output_types", output_types,
        "output_shapes", output_shapes, "use_inter_op_parallelism",
        use_inter_op_parallelism)
      return _result
    except _core._FallbackException:
      # The fast path could not handle these inputs; use the Python slow path.
      return reduce_dataset_eager_fallback(
          input_dataset, initial_state, other_arguments, f=f,
          output_types=output_types, output_shapes=output_shapes,
          use_inter_op_parallelism=use_inter_op_parallelism, name=name,
          ctx=_ctx)
    except _core._NotOkStatusException as e:
      # Re-raise op failures as the corresponding TF Python exception type.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
5085 
5086 
def reduce_dataset_eager_fallback(input_dataset, initial_state, other_arguments, f, output_types, output_shapes, use_inter_op_parallelism=True, name=None, ctx=None):
  r"""Eager-mode slow path for reduce_dataset.

  Validates the attribute lists, flattens the inputs by hand, and executes
  the ReduceDataset op directly through _execute.execute.
  """
  eager_ctx = ctx or _context.context()
  if isinstance(output_types, (list, tuple)):
    output_types = [_execute.make_type(dtype, "output_types") for dtype in output_types]
  else:
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'reduce_dataset' Op, not %r." % output_types)
  if isinstance(output_shapes, (list, tuple)):
    output_shapes = [_execute.make_shape(shape, "output_shapes") for shape in output_shapes]
  else:
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'reduce_dataset' Op, not %r." % output_shapes)
  if use_inter_op_parallelism is None:
    use_inter_op_parallelism = True
  use_inter_op_parallelism = _execute.make_bool(use_inter_op_parallelism, "use_inter_op_parallelism")
  # Converting the two variadic tensor lists also yields the dtype lists that
  # become the Tstate/Targuments attrs.
  _attr_Tstate, initial_state = _execute.convert_to_mixed_eager_tensors(initial_state, eager_ctx)
  _attr_Targuments, other_arguments = _execute.convert_to_mixed_eager_tensors(other_arguments, eager_ctx)
  input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
  flat_inputs = [input_dataset] + list(initial_state) + list(other_arguments)
  op_attrs = (
      "f", f,
      "Tstate", _attr_Tstate,
      "Targuments", _attr_Targuments,
      "output_types", output_types,
      "output_shapes", output_shapes,
      "use_inter_op_parallelism", use_inter_op_parallelism,
  )
  results = _execute.execute(b"ReduceDataset", len(output_types),
                             inputs=flat_inputs, attrs=op_attrs,
                             ctx=eager_ctx, name=name)
  _execute.record_gradient(
      "ReduceDataset", flat_inputs, op_attrs, results, name)
  return results
5118 
5119 
def repeat_dataset(input_dataset, count, output_types, output_shapes, name=None):
  r"""Creates a dataset that emits the outputs of `input_dataset` `count` times.

  Args:
    input_dataset: A `Tensor` of type `variant`.
    count: A `Tensor` of type `int64`.
      A scalar representing the number of times that `input_dataset` should
      be repeated. A value of `-1` indicates that it should be repeated infinitely.
    output_types: A list of `tf.DTypes` that has length `>= 1`.
    output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `variant`.
  """
  _ctx = _context._context
  # Graph mode: normalize attrs, then build a RepeatDataset node through the
  # op-def library.
  if _ctx is None or not _ctx._eager_context.is_eager:
    if not isinstance(output_types, (list, tuple)):
      raise TypeError(
          "Expected list for 'output_types' argument to "
          "'repeat_dataset' Op, not %r." % output_types)
    output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
    if not isinstance(output_shapes, (list, tuple)):
      raise TypeError(
          "Expected list for 'output_shapes' argument to "
          "'repeat_dataset' Op, not %r." % output_shapes)
    output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
    _, _, _op = _op_def_lib._apply_op_helper(
        "RepeatDataset", input_dataset=input_dataset, count=count,
        output_types=output_types, output_shapes=output_shapes, name=name)
    _result = _op.outputs[:]
    _inputs_flat = _op.inputs
    _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes",
              _op.get_attr("output_shapes"))
    _execute.record_gradient(
      "RepeatDataset", _inputs_flat, _attrs, _result, name)
    # Single variant output: unpack the one-element list.
    _result, = _result
    return _result

  else:
    # Eager mode: C fast path first; positional protocol must stay intact.
    try:
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._eager_context.device_name,
        "RepeatDataset", name, _ctx._post_execution_callbacks, input_dataset,
        count, "output_types", output_types, "output_shapes", output_shapes)
      return _result
    except _core._FallbackException:
      # Fall back to the Python slow path.
      return repeat_dataset_eager_fallback(
          input_dataset, count, output_types=output_types,
          output_shapes=output_shapes, name=name, ctx=_ctx)
    except _core._NotOkStatusException as e:
      # Re-raise op failures as the corresponding TF Python exception type.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
5176 
5177 
def repeat_dataset_eager_fallback(input_dataset, count, output_types, output_shapes, name=None, ctx=None):
  r"""Eager-mode slow path for repeat_dataset.

  Coerces the inputs to tensors and runs the RepeatDataset op directly.
  """
  eager_ctx = ctx or _context.context()
  if isinstance(output_types, (list, tuple)):
    output_types = [_execute.make_type(dtype, "output_types") for dtype in output_types]
  else:
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'repeat_dataset' Op, not %r." % output_types)
  if isinstance(output_shapes, (list, tuple)):
    output_shapes = [_execute.make_shape(shape, "output_shapes") for shape in output_shapes]
  else:
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'repeat_dataset' Op, not %r." % output_shapes)
  flat_inputs = [
      _ops.convert_to_tensor(input_dataset, _dtypes.variant),
      _ops.convert_to_tensor(count, _dtypes.int64),
  ]
  op_attrs = ("output_types", output_types, "output_shapes", output_shapes)
  results = _execute.execute(b"RepeatDataset", 1, inputs=flat_inputs,
                             attrs=op_attrs, ctx=eager_ctx, name=name)
  _execute.record_gradient(
      "RepeatDataset", flat_inputs, op_attrs, results, name)
  dataset_variant, = results
  return dataset_variant
5203 
5204 
def scan_dataset(input_dataset, initial_state, other_arguments, f, output_types, output_shapes, name=None):
  r"""Creates a dataset successively reduces `f` over the elements of `input_dataset`.

  Args:
    input_dataset: A `Tensor` of type `variant`.
    initial_state: A list of `Tensor` objects.
    other_arguments: A list of `Tensor` objects.
    f: A function decorated with @Defun.
    output_types: A list of `tf.DTypes` that has length `>= 1`.
    output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `variant`.
  """
  _ctx = _context._context
  # Graph mode: normalize attrs, then build a ScanDataset node through the
  # op-def library.
  if _ctx is None or not _ctx._eager_context.is_eager:
    if not isinstance(output_types, (list, tuple)):
      raise TypeError(
          "Expected list for 'output_types' argument to "
          "'scan_dataset' Op, not %r." % output_types)
    output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
    if not isinstance(output_shapes, (list, tuple)):
      raise TypeError(
          "Expected list for 'output_shapes' argument to "
          "'scan_dataset' Op, not %r." % output_shapes)
    output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
    _, _, _op = _op_def_lib._apply_op_helper(
        "ScanDataset", input_dataset=input_dataset,
        initial_state=initial_state, other_arguments=other_arguments, f=f,
        output_types=output_types, output_shapes=output_shapes, name=name)
    _result = _op.outputs[:]
    _inputs_flat = _op.inputs
    # Tstate/Targuments are inferred from the inputs by the op-def library,
    # so they are read back from the created op here.
    _attrs = ("f", _op.get_attr("f"), "Tstate", _op.get_attr("Tstate"),
              "Targuments", _op.get_attr("Targuments"), "output_types",
              _op.get_attr("output_types"), "output_shapes",
              _op.get_attr("output_shapes"))
    _execute.record_gradient(
      "ScanDataset", _inputs_flat, _attrs, _result, name)
    # Single variant output: unpack the one-element list.
    _result, = _result
    return _result

  else:
    # Eager mode: C fast path first; positional protocol must stay intact.
    try:
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._eager_context.device_name, "ScanDataset",
        name, _ctx._post_execution_callbacks, input_dataset, initial_state,
        other_arguments, "f", f, "output_types", output_types,
        "output_shapes", output_shapes)
      return _result
    except _core._FallbackException:
      # Fall back to the Python slow path.
      return scan_dataset_eager_fallback(
          input_dataset, initial_state, other_arguments, f=f,
          output_types=output_types, output_shapes=output_shapes, name=name,
          ctx=_ctx)
    except _core._NotOkStatusException as e:
      # Re-raise op failures as the corresponding TF Python exception type.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
5266 
5267 
def scan_dataset_eager_fallback(input_dataset, initial_state, other_arguments, f, output_types, output_shapes, name=None, ctx=None):
  r"""Eager-mode slow path for scan_dataset.

  Validates the attribute lists, flattens the inputs, and executes the
  ScanDataset op directly through _execute.execute.
  """
  eager_ctx = ctx or _context.context()
  if isinstance(output_types, (list, tuple)):
    output_types = [_execute.make_type(dtype, "output_types") for dtype in output_types]
  else:
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'scan_dataset' Op, not %r." % output_types)
  if isinstance(output_shapes, (list, tuple)):
    output_shapes = [_execute.make_shape(shape, "output_shapes") for shape in output_shapes]
  else:
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'scan_dataset' Op, not %r." % output_shapes)
  # Converting the variadic tensor lists also yields the dtype lists that
  # become the Tstate/Targuments attrs.
  _attr_Tstate, initial_state = _execute.convert_to_mixed_eager_tensors(initial_state, eager_ctx)
  _attr_Targuments, other_arguments = _execute.convert_to_mixed_eager_tensors(other_arguments, eager_ctx)
  input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
  flat_inputs = [input_dataset] + list(initial_state) + list(other_arguments)
  op_attrs = (
      "f", f,
      "Tstate", _attr_Tstate,
      "Targuments", _attr_Targuments,
      "output_types", output_types,
      "output_shapes", output_shapes,
  )
  results = _execute.execute(b"ScanDataset", 1, inputs=flat_inputs,
                             attrs=op_attrs, ctx=eager_ctx, name=name)
  _execute.record_gradient(
      "ScanDataset", flat_inputs, op_attrs, results, name)
  dataset_variant, = results
  return dataset_variant
5295 
5296 
def serialize_iterator(resource_handle, name=None):
  r"""Converts the given `resource_handle` representing an iterator to a variant tensor.

  Args:
    resource_handle: A `Tensor` of type `resource`.
      A handle to an iterator resource.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `variant`.
  """
  _ctx = _context._context
  # Graph mode: build a SerializeIterator node through the op-def library.
  if _ctx is None or not _ctx._eager_context.is_eager:
    _, _, _op = _op_def_lib._apply_op_helper(
        "SerializeIterator", resource_handle=resource_handle, name=name)
    _result = _op.outputs[:]
    _inputs_flat = _op.inputs
    # SerializeIterator has no attrs.
    _attrs = None
    _execute.record_gradient(
      "SerializeIterator", _inputs_flat, _attrs, _result, name)
    # Single variant output: unpack the one-element list.
    _result, = _result
    return _result

  else:
    # Eager mode: C fast path first; positional protocol must stay intact.
    try:
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._eager_context.device_name,
        "SerializeIterator", name, _ctx._post_execution_callbacks,
        resource_handle)
      return _result
    except _core._FallbackException:
      # Fall back to the Python slow path.
      return serialize_iterator_eager_fallback(
          resource_handle, name=name, ctx=_ctx)
    except _core._NotOkStatusException as e:
      # Re-raise op failures as the corresponding TF Python exception type.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
5336 
5337 
def serialize_iterator_eager_fallback(resource_handle, name=None, ctx=None):
  r"""Eager-mode slow path for serialize_iterator.

  Coerces the handle to a resource tensor and runs SerializeIterator
  directly through _execute.execute. The op has no attrs.
  """
  eager_ctx = ctx or _context.context()
  handle_tensor = _ops.convert_to_tensor(resource_handle, _dtypes.resource)
  flat_inputs = [handle_tensor]
  results = _execute.execute(b"SerializeIterator", 1, inputs=flat_inputs,
                             attrs=None, ctx=eager_ctx, name=name)
  _execute.record_gradient(
      "SerializeIterator", flat_inputs, None, results, name)
  serialized, = results
  return serialized
5352 
5353 
def set_stats_aggregator_dataset(input_dataset, stats_aggregator, output_types, output_shapes, name=None):
  r"""Creates a dataset whose iterators report statistics to `stats_aggregator`.

  Args:
    input_dataset: A `Tensor` of type `variant`.
    stats_aggregator: A `Tensor` of type `resource`.
    output_types: A list of `tf.DTypes` that has length `>= 1`.
    output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `variant`.
  """
  _ctx = _context._context
  # Graph mode: normalize attrs, then build a SetStatsAggregatorDataset node
  # through the op-def library.
  if _ctx is None or not _ctx._eager_context.is_eager:
    if not isinstance(output_types, (list, tuple)):
      raise TypeError(
          "Expected list for 'output_types' argument to "
          "'set_stats_aggregator_dataset' Op, not %r." % output_types)
    output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
    if not isinstance(output_shapes, (list, tuple)):
      raise TypeError(
          "Expected list for 'output_shapes' argument to "
          "'set_stats_aggregator_dataset' Op, not %r." % output_shapes)
    output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
    _, _, _op = _op_def_lib._apply_op_helper(
        "SetStatsAggregatorDataset", input_dataset=input_dataset,
        stats_aggregator=stats_aggregator, output_types=output_types,
        output_shapes=output_shapes, name=name)
    _result = _op.outputs[:]
    _inputs_flat = _op.inputs
    _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes",
              _op.get_attr("output_shapes"))
    _execute.record_gradient(
      "SetStatsAggregatorDataset", _inputs_flat, _attrs, _result, name)
    # Single variant output: unpack the one-element list.
    _result, = _result
    return _result

  else:
    # Eager mode: C fast path first; positional protocol must stay intact.
    try:
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._eager_context.device_name,
        "SetStatsAggregatorDataset", name, _ctx._post_execution_callbacks,
        input_dataset, stats_aggregator, "output_types", output_types,
        "output_shapes", output_shapes)
      return _result
    except _core._FallbackException:
      # Fall back to the Python slow path.
      return set_stats_aggregator_dataset_eager_fallback(
          input_dataset, stats_aggregator, output_types=output_types,
          output_shapes=output_shapes, name=name, ctx=_ctx)
    except _core._NotOkStatusException as e:
      # Re-raise op failures as the corresponding TF Python exception type.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
5410 
5411 
def set_stats_aggregator_dataset_eager_fallback(input_dataset, stats_aggregator, output_types, output_shapes, name=None, ctx=None):
  r"""Eager-mode slow path for set_stats_aggregator_dataset.

  Coerces the inputs to tensors and runs SetStatsAggregatorDataset directly.
  """
  eager_ctx = ctx or _context.context()
  if isinstance(output_types, (list, tuple)):
    output_types = [_execute.make_type(dtype, "output_types") for dtype in output_types]
  else:
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'set_stats_aggregator_dataset' Op, not %r." % output_types)
  if isinstance(output_shapes, (list, tuple)):
    output_shapes = [_execute.make_shape(shape, "output_shapes") for shape in output_shapes]
  else:
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'set_stats_aggregator_dataset' Op, not %r." % output_shapes)
  flat_inputs = [
      _ops.convert_to_tensor(input_dataset, _dtypes.variant),
      _ops.convert_to_tensor(stats_aggregator, _dtypes.resource),
  ]
  op_attrs = ("output_types", output_types, "output_shapes", output_shapes)
  results = _execute.execute(b"SetStatsAggregatorDataset", 1,
                             inputs=flat_inputs, attrs=op_attrs,
                             ctx=eager_ctx, name=name)
  _execute.record_gradient(
      "SetStatsAggregatorDataset", flat_inputs, op_attrs, results, name)
  dataset_variant, = results
  return dataset_variant
5438 
5439 
def shuffle_and_repeat_dataset(input_dataset, buffer_size, seed, seed2, count, output_types, output_shapes, name=None):
  r"""Creates a dataset that shuffles and repeats elements from `input_dataset` pseudorandomly.

  Args:
    input_dataset: A `Tensor` of type `variant`.
    buffer_size: A `Tensor` of type `int64`.
      The number of output elements to buffer in an iterator over
      this dataset. Compare with the `min_after_dequeue` attr when creating a
      `RandomShuffleQueue`.
    seed: A `Tensor` of type `int64`.
      A scalar seed for the random number generator. If either `seed` or
      `seed2` is set to be non-zero, the random number generator is seeded
      by the given seed.  Otherwise, a random seed is used.
    seed2: A `Tensor` of type `int64`.
      A second scalar seed to avoid seed collision.
    count: A `Tensor` of type `int64`.
      A scalar representing the number of times the underlying dataset
      should be repeated. The default is `-1`, which results in infinite repetition.
    output_types: A list of `tf.DTypes` that has length `>= 1`.
    output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `variant`.
  """
  _ctx = _context._context
  # Graph mode: normalize attrs, then build a ShuffleAndRepeatDataset node
  # through the op-def library.
  if _ctx is None or not _ctx._eager_context.is_eager:
    if not isinstance(output_types, (list, tuple)):
      raise TypeError(
          "Expected list for 'output_types' argument to "
          "'shuffle_and_repeat_dataset' Op, not %r." % output_types)
    output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
    if not isinstance(output_shapes, (list, tuple)):
      raise TypeError(
          "Expected list for 'output_shapes' argument to "
          "'shuffle_and_repeat_dataset' Op, not %r." % output_shapes)
    output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
    _, _, _op = _op_def_lib._apply_op_helper(
        "ShuffleAndRepeatDataset", input_dataset=input_dataset,
        buffer_size=buffer_size, seed=seed, seed2=seed2, count=count,
        output_types=output_types, output_shapes=output_shapes, name=name)
    _result = _op.outputs[:]
    _inputs_flat = _op.inputs
    _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes",
              _op.get_attr("output_shapes"))
    _execute.record_gradient(
      "ShuffleAndRepeatDataset", _inputs_flat, _attrs, _result, name)
    # Single variant output: unpack the one-element list.
    _result, = _result
    return _result

  else:
    # Eager mode: C fast path first; positional protocol must stay intact.
    try:
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._eager_context.device_name,
        "ShuffleAndRepeatDataset", name, _ctx._post_execution_callbacks,
        input_dataset, buffer_size, seed, seed2, count, "output_types",
        output_types, "output_shapes", output_shapes)
      return _result
    except _core._FallbackException:
      # Fall back to the Python slow path.
      return shuffle_and_repeat_dataset_eager_fallback(
          input_dataset, buffer_size, seed, seed2, count,
          output_types=output_types, output_shapes=output_shapes, name=name,
          ctx=_ctx)
    except _core._NotOkStatusException as e:
      # Re-raise op failures as the corresponding TF Python exception type.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
5511 
5512 
def shuffle_and_repeat_dataset_eager_fallback(input_dataset, buffer_size, seed, seed2, count, output_types, output_shapes, name=None, ctx=None):
  r"""Eager-mode slow path for shuffle_and_repeat_dataset.

  Coerces the inputs to tensors and runs ShuffleAndRepeatDataset directly.
  """
  eager_ctx = ctx or _context.context()
  if isinstance(output_types, (list, tuple)):
    output_types = [_execute.make_type(dtype, "output_types") for dtype in output_types]
  else:
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'shuffle_and_repeat_dataset' Op, not %r." % output_types)
  if isinstance(output_shapes, (list, tuple)):
    output_shapes = [_execute.make_shape(shape, "output_shapes") for shape in output_shapes]
  else:
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'shuffle_and_repeat_dataset' Op, not %r." % output_shapes)
  # Input order is fixed by the op definition: dataset, then the four int64
  # scalars.
  flat_inputs = [_ops.convert_to_tensor(input_dataset, _dtypes.variant)]
  for scalar in (buffer_size, seed, seed2, count):
    flat_inputs.append(_ops.convert_to_tensor(scalar, _dtypes.int64))
  op_attrs = ("output_types", output_types, "output_shapes", output_shapes)
  results = _execute.execute(b"ShuffleAndRepeatDataset", 1,
                             inputs=flat_inputs, attrs=op_attrs,
                             ctx=eager_ctx, name=name)
  _execute.record_gradient(
      "ShuffleAndRepeatDataset", flat_inputs, op_attrs, results, name)
  dataset_variant, = results
  return dataset_variant
5542 
5543 
def shuffle_dataset(input_dataset, buffer_size, seed, seed2, output_types, output_shapes, reshuffle_each_iteration=True, name=None):
  r"""Creates a dataset that shuffles elements from `input_dataset` pseudorandomly.

  Args:
    input_dataset: A `Tensor` of type `variant`.
    buffer_size: A `Tensor` of type `int64`.
      The number of output elements to buffer in an iterator over
      this dataset. Compare with the `min_after_dequeue` attr when creating a
      `RandomShuffleQueue`.
    seed: A `Tensor` of type `int64`.
      A scalar seed for the random number generator. If either `seed` or
      `seed2` is set to be non-zero, the random number generator is seeded
      by the given seed.  Otherwise, a random seed is used.
    seed2: A `Tensor` of type `int64`.
      A second scalar seed to avoid seed collision.
    output_types: A list of `tf.DTypes` that has length `>= 1`.
    output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
    reshuffle_each_iteration: An optional `bool`. Defaults to `True`.
      If true, each iterator over this dataset will be given
      a different pseudorandomly generated seed, based on a sequence seeded by the
      `seed` and `seed2` inputs. If false, each iterator will be given the same
      seed, and repeated iteration over this dataset will yield the exact same
      sequence of results.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `variant`.
  """
  _ctx = _context._context
  # Graph mode: normalize attrs, then build a ShuffleDataset node through the
  # op-def library.
  if _ctx is None or not _ctx._eager_context.is_eager:
    if not isinstance(output_types, (list, tuple)):
      raise TypeError(
          "Expected list for 'output_types' argument to "
          "'shuffle_dataset' Op, not %r." % output_types)
    output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
    if not isinstance(output_shapes, (list, tuple)):
      raise TypeError(
          "Expected list for 'output_shapes' argument to "
          "'shuffle_dataset' Op, not %r." % output_shapes)
    output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
    if reshuffle_each_iteration is None:
      reshuffle_each_iteration = True
    reshuffle_each_iteration = _execute.make_bool(reshuffle_each_iteration, "reshuffle_each_iteration")
    _, _, _op = _op_def_lib._apply_op_helper(
        "ShuffleDataset", input_dataset=input_dataset,
        buffer_size=buffer_size, seed=seed, seed2=seed2,
        output_types=output_types, output_shapes=output_shapes,
        reshuffle_each_iteration=reshuffle_each_iteration, name=name)
    _result = _op.outputs[:]
    _inputs_flat = _op.inputs
    _attrs = ("reshuffle_each_iteration",
              _op.get_attr("reshuffle_each_iteration"), "output_types",
              _op.get_attr("output_types"), "output_shapes",
              _op.get_attr("output_shapes"))
    _execute.record_gradient(
      "ShuffleDataset", _inputs_flat, _attrs, _result, name)
    # Single variant output: unpack the one-element list.
    _result, = _result
    return _result

  else:
    # Eager mode: C fast path first; positional protocol must stay intact.
    try:
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._eager_context.device_name,
        "ShuffleDataset", name, _ctx._post_execution_callbacks, input_dataset,
        buffer_size, seed, seed2, "reshuffle_each_iteration",
        reshuffle_each_iteration, "output_types", output_types,
        "output_shapes", output_shapes)
      return _result
    except _core._FallbackException:
      # Fall back to the Python slow path.
      return shuffle_dataset_eager_fallback(
          input_dataset, buffer_size, seed, seed2,
          reshuffle_each_iteration=reshuffle_each_iteration,
          output_types=output_types, output_shapes=output_shapes, name=name,
          ctx=_ctx)
    except _core._NotOkStatusException as e:
      # Re-raise op failures as the corresponding TF Python exception type.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
5624 
5625 
def shuffle_dataset_eager_fallback(input_dataset, buffer_size, seed, seed2, output_types, output_shapes, reshuffle_each_iteration=True, name=None, ctx=None):
  r"""Eager-mode slow path for shuffle_dataset.

  Coerces the inputs to tensors and runs ShuffleDataset directly.
  """
  eager_ctx = ctx or _context.context()
  if isinstance(output_types, (list, tuple)):
    output_types = [_execute.make_type(dtype, "output_types") for dtype in output_types]
  else:
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'shuffle_dataset' Op, not %r." % output_types)
  if isinstance(output_shapes, (list, tuple)):
    output_shapes = [_execute.make_shape(shape, "output_shapes") for shape in output_shapes]
  else:
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'shuffle_dataset' Op, not %r." % output_shapes)
  reshuffle_each_iteration = _execute.make_bool(
      True if reshuffle_each_iteration is None else reshuffle_each_iteration,
      "reshuffle_each_iteration")
  flat_inputs = [_ops.convert_to_tensor(input_dataset, _dtypes.variant)]
  for scalar in (buffer_size, seed, seed2):
    flat_inputs.append(_ops.convert_to_tensor(scalar, _dtypes.int64))
  op_attrs = (
      "reshuffle_each_iteration", reshuffle_each_iteration,
      "output_types", output_types,
      "output_shapes", output_shapes,
  )
  results = _execute.execute(b"ShuffleDataset", 1, inputs=flat_inputs,
                             attrs=op_attrs, ctx=eager_ctx, name=name)
  _execute.record_gradient(
      "ShuffleDataset", flat_inputs, op_attrs, results, name)
  dataset_variant, = results
  return dataset_variant
5657 
5658 
def sink_dataset(input_dataset, name=None):
  r"""A placeholder for input pipeline graph optimizations.

  A placeholder for input pipeline graph optimizations.

  Args:
    input_dataset: A `Tensor` of type `variant`.
      A variant tensor representing the input dataset.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `variant`.
  """
  _ctx = _context._context
  # Graph mode (or eager context not yet initialized): build a graph op.
  if _ctx is None or not _ctx._eager_context.is_eager:
    _, _, _op = _op_def_lib._apply_op_helper(
        "SinkDataset", input_dataset=input_dataset, name=name)
    _result = _op.outputs[:]
    _inputs_flat = _op.inputs
    _attrs = None  # SinkDataset has no attrs
    _execute.record_gradient(
      "SinkDataset", _inputs_flat, _attrs, _result, name)
    _result, = _result  # single output
    return _result

  else:
    # Eager mode: try the C fast path; fall back to the Python slow path
    # when the fast path cannot handle the given inputs.
    try:
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._eager_context.device_name, "SinkDataset",
        name, _ctx._post_execution_callbacks, input_dataset)
      return _result
    except _core._FallbackException:
      return sink_dataset_eager_fallback(
          input_dataset, name=name, ctx=_ctx)
    except _core._NotOkStatusException as e:
      # Include the op name in the raised error message to aid debugging.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
5699 
5700 
def sink_dataset_eager_fallback(input_dataset, name=None, ctx=None):
  r"""This is the slowpath function for Eager mode.
  This is for function sink_dataset

  Runs the SinkDataset op through the generic `_execute.execute` path;
  used when the C fast path raises `_FallbackException`.
  """
  _ctx = ctx if ctx else _context.context()
  # Coerce the input to the dtype declared by the op definition.
  input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
  _inputs_flat = [input_dataset]
  _attrs = None  # SinkDataset has no attrs
  _result = _execute.execute(b"SinkDataset", 1, inputs=_inputs_flat,
                             attrs=_attrs, ctx=_ctx, name=name)
  _execute.record_gradient(
      "SinkDataset", _inputs_flat, _attrs, _result, name)
  _result, = _result  # the op has exactly one output
  return _result
5715 
5716 
def skip_dataset(input_dataset, count, output_types, output_shapes, name=None):
  r"""Creates a dataset that skips `count` elements from the `input_dataset`.

  Args:
    input_dataset: A `Tensor` of type `variant`.
    count: A `Tensor` of type `int64`.
      A scalar representing the number of elements from the `input_dataset`
      that should be skipped.  If count is -1, skips everything.
    output_types: A list of `tf.DTypes` that has length `>= 1`.
    output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `variant`.
  """
  _ctx = _context._context
  # Graph mode (or eager context not yet initialized): build a graph op.
  if _ctx is None or not _ctx._eager_context.is_eager:
    # Validate and canonicalize the list-valued attrs.
    if not isinstance(output_types, (list, tuple)):
      raise TypeError(
          "Expected list for 'output_types' argument to "
          "'skip_dataset' Op, not %r." % output_types)
    output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
    if not isinstance(output_shapes, (list, tuple)):
      raise TypeError(
          "Expected list for 'output_shapes' argument to "
          "'skip_dataset' Op, not %r." % output_shapes)
    output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
    _, _, _op = _op_def_lib._apply_op_helper(
        "SkipDataset", input_dataset=input_dataset, count=count,
        output_types=output_types, output_shapes=output_shapes, name=name)
    _result = _op.outputs[:]
    _inputs_flat = _op.inputs
    # Read the attrs back off the created op for gradient recording.
    _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes",
              _op.get_attr("output_shapes"))
    _execute.record_gradient(
      "SkipDataset", _inputs_flat, _attrs, _result, name)
    _result, = _result  # single output
    return _result

  else:
    # Eager mode: try the C fast path; fall back to the Python slow path
    # when the fast path cannot handle the given inputs.
    try:
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._eager_context.device_name, "SkipDataset",
        name, _ctx._post_execution_callbacks, input_dataset, count,
        "output_types", output_types, "output_shapes", output_shapes)
      return _result
    except _core._FallbackException:
      return skip_dataset_eager_fallback(
          input_dataset, count, output_types=output_types,
          output_shapes=output_shapes, name=name, ctx=_ctx)
    except _core._NotOkStatusException as e:
      # Include the op name in the raised error message to aid debugging.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
5773 
5774 
def skip_dataset_eager_fallback(input_dataset, count, output_types, output_shapes, name=None, ctx=None):
  r"""This is the slowpath function for Eager mode.
  This is for function skip_dataset

  Runs the SkipDataset op through the generic `_execute.execute` path;
  used when the C fast path raises `_FallbackException`.
  """
  _ctx = ctx if ctx else _context.context()
  # Validate and canonicalize the list-valued attrs before execution.
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'skip_dataset' Op, not %r." % output_types)
  output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'skip_dataset' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  # Coerce each input to the dtype declared by the op definition.
  input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
  count = _ops.convert_to_tensor(count, _dtypes.int64)
  _inputs_flat = [input_dataset, count]
  _attrs = ("output_types", output_types, "output_shapes", output_shapes)
  _result = _execute.execute(b"SkipDataset", 1, inputs=_inputs_flat,
                             attrs=_attrs, ctx=_ctx, name=name)
  _execute.record_gradient(
      "SkipDataset", _inputs_flat, _attrs, _result, name)
  _result, = _result  # the op has exactly one output
  return _result
5800 
5801 
def slide_dataset(input_dataset, window_size, window_shift, window_stride, output_types, output_shapes, name=None):
  r"""Creates a dataset that passes a sliding window over `input_dataset`.

  Args:
    input_dataset: A `Tensor` of type `variant`.
    window_size: A `Tensor` of type `int64`.
      A scalar representing the number of elements in the
      sliding window.
    window_shift: A `Tensor` of type `int64`.
      A scalar representing the steps moving the sliding window
      forward in one iteration. It must be positive.
    window_stride: A `Tensor` of type `int64`.
      A scalar representing the stride of the input elements of the sliding window.
      It must be positive.
    output_types: A list of `tf.DTypes` that has length `>= 1`.
    output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `variant`.
  """
  _ctx = _context._context
  # Graph mode (or eager context not yet initialized): build a graph op.
  if _ctx is None or not _ctx._eager_context.is_eager:
    # Validate and canonicalize the list-valued attrs.
    if not isinstance(output_types, (list, tuple)):
      raise TypeError(
          "Expected list for 'output_types' argument to "
          "'slide_dataset' Op, not %r." % output_types)
    output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
    if not isinstance(output_shapes, (list, tuple)):
      raise TypeError(
          "Expected list for 'output_shapes' argument to "
          "'slide_dataset' Op, not %r." % output_shapes)
    output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
    _, _, _op = _op_def_lib._apply_op_helper(
        "SlideDataset", input_dataset=input_dataset, window_size=window_size,
        window_shift=window_shift, window_stride=window_stride,
        output_types=output_types, output_shapes=output_shapes, name=name)
    _result = _op.outputs[:]
    _inputs_flat = _op.inputs
    # Read the attrs back off the created op for gradient recording.
    _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes",
              _op.get_attr("output_shapes"))
    _execute.record_gradient(
      "SlideDataset", _inputs_flat, _attrs, _result, name)
    _result, = _result  # single output
    return _result

  else:
    # Eager mode: try the C fast path; fall back to the Python slow path
    # when the fast path cannot handle the given inputs.
    try:
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._eager_context.device_name, "SlideDataset",
        name, _ctx._post_execution_callbacks, input_dataset, window_size,
        window_shift, window_stride, "output_types", output_types,
        "output_shapes", output_shapes)
      return _result
    except _core._FallbackException:
      return slide_dataset_eager_fallback(
          input_dataset, window_size, window_shift, window_stride,
          output_types=output_types, output_shapes=output_shapes, name=name,
          ctx=_ctx)
    except _core._NotOkStatusException as e:
      # Include the op name in the raised error message to aid debugging.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
5867 
5868 
def slide_dataset_eager_fallback(input_dataset, window_size, window_shift, window_stride, output_types, output_shapes, name=None, ctx=None):
  r"""This is the slowpath function for Eager mode.
  This is for function slide_dataset

  Runs the SlideDataset op through the generic `_execute.execute` path;
  used when the C fast path raises `_FallbackException`.
  """
  _ctx = ctx if ctx else _context.context()
  # Validate and canonicalize the list-valued attrs before execution.
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'slide_dataset' Op, not %r." % output_types)
  output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'slide_dataset' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  # Coerce each input to the dtype declared by the op definition.
  input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
  window_size = _ops.convert_to_tensor(window_size, _dtypes.int64)
  window_shift = _ops.convert_to_tensor(window_shift, _dtypes.int64)
  window_stride = _ops.convert_to_tensor(window_stride, _dtypes.int64)
  _inputs_flat = [input_dataset, window_size, window_shift, window_stride]
  _attrs = ("output_types", output_types, "output_shapes", output_shapes)
  _result = _execute.execute(b"SlideDataset", 1, inputs=_inputs_flat,
                             attrs=_attrs, ctx=_ctx, name=name)
  _execute.record_gradient(
      "SlideDataset", _inputs_flat, _attrs, _result, name)
  _result, = _result  # the op has exactly one output
  return _result
5896 
5897 
def sparse_tensor_slice_dataset(indices, values, dense_shape, name=None):
  r"""Creates a dataset that splits a SparseTensor into elements row-wise.

  Args:
    indices: A `Tensor` of type `int64`.
    values: A `Tensor`.
    dense_shape: A `Tensor` of type `int64`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `variant`.
  """
  _ctx = _context._context
  # Graph mode (or eager context not yet initialized): build a graph op.
  if _ctx is None or not _ctx._eager_context.is_eager:
    _, _, _op = _op_def_lib._apply_op_helper(
        "SparseTensorSliceDataset", indices=indices, values=values,
        dense_shape=dense_shape, name=name)
    _result = _op.outputs[:]
    _inputs_flat = _op.inputs
    # Tvalues is inferred from `values` by the op helper; read it back
    # for gradient recording.
    _attrs = ("Tvalues", _op.get_attr("Tvalues"))
    _execute.record_gradient(
      "SparseTensorSliceDataset", _inputs_flat, _attrs, _result, name)
    _result, = _result  # single output
    return _result

  else:
    # Eager mode: try the C fast path; fall back to the Python slow path
    # when the fast path cannot handle the given inputs.
    try:
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._eager_context.device_name,
        "SparseTensorSliceDataset", name, _ctx._post_execution_callbacks,
        indices, values, dense_shape)
      return _result
    except _core._FallbackException:
      return sparse_tensor_slice_dataset_eager_fallback(
          indices, values, dense_shape, name=name, ctx=_ctx)
    except _core._NotOkStatusException as e:
      # Include the op name in the raised error message to aid debugging.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
5939 
5940 
def sparse_tensor_slice_dataset_eager_fallback(indices, values, dense_shape, name=None, ctx=None):
  r"""This is the slowpath function for Eager mode.
  This is for function sparse_tensor_slice_dataset

  Runs the SparseTensorSliceDataset op through the generic
  `_execute.execute` path; used when the C fast path raises
  `_FallbackException`.
  """
  _ctx = ctx if ctx else _context.context()
  # Infer the Tvalues attr from the runtime dtype of `values`.
  _attr_Tvalues, (values,) = _execute.args_to_matching_eager([values], _ctx)
  indices = _ops.convert_to_tensor(indices, _dtypes.int64)
  dense_shape = _ops.convert_to_tensor(dense_shape, _dtypes.int64)
  _inputs_flat = [indices, values, dense_shape]
  _attrs = ("Tvalues", _attr_Tvalues)
  _result = _execute.execute(b"SparseTensorSliceDataset", 1,
                             inputs=_inputs_flat, attrs=_attrs, ctx=_ctx,
                             name=name)
  _execute.record_gradient(
      "SparseTensorSliceDataset", _inputs_flat, _attrs, _result, name)
  _result, = _result  # the op has exactly one output
  return _result
5958 
5959 
def sql_dataset(driver_name, data_source_name, query, output_types, output_shapes, name=None):
  r"""Creates a dataset that executes a SQL query and emits rows of the result set.

  Args:
    driver_name: A `Tensor` of type `string`.
      The database type. Currently, the only supported type is 'sqlite'.
    data_source_name: A `Tensor` of type `string`.
      A connection string to connect to the database.
    query: A `Tensor` of type `string`. A SQL query to execute.
    output_types: A list of `tf.DTypes` that has length `>= 1`.
    output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `variant`.
  """
  _ctx = _context._context
  # Graph mode (or eager context not yet initialized): build a graph op.
  if _ctx is None or not _ctx._eager_context.is_eager:
    # Validate and canonicalize the list-valued attrs.
    if not isinstance(output_types, (list, tuple)):
      raise TypeError(
          "Expected list for 'output_types' argument to "
          "'sql_dataset' Op, not %r." % output_types)
    output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
    if not isinstance(output_shapes, (list, tuple)):
      raise TypeError(
          "Expected list for 'output_shapes' argument to "
          "'sql_dataset' Op, not %r." % output_shapes)
    output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
    _, _, _op = _op_def_lib._apply_op_helper(
        "SqlDataset", driver_name=driver_name,
        data_source_name=data_source_name, query=query,
        output_types=output_types, output_shapes=output_shapes, name=name)
    _result = _op.outputs[:]
    _inputs_flat = _op.inputs
    # Read the attrs back off the created op for gradient recording.
    _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes",
              _op.get_attr("output_shapes"))
    _execute.record_gradient(
      "SqlDataset", _inputs_flat, _attrs, _result, name)
    _result, = _result  # single output
    return _result

  else:
    # Eager mode: try the C fast path; fall back to the Python slow path
    # when the fast path cannot handle the given inputs.
    try:
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._eager_context.device_name, "SqlDataset",
        name, _ctx._post_execution_callbacks, driver_name, data_source_name,
        query, "output_types", output_types, "output_shapes", output_shapes)
      return _result
    except _core._FallbackException:
      return sql_dataset_eager_fallback(
          driver_name, data_source_name, query, output_types=output_types,
          output_shapes=output_shapes, name=name, ctx=_ctx)
    except _core._NotOkStatusException as e:
      # Include the op name in the raised error message to aid debugging.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
6018 
6019 
def sql_dataset_eager_fallback(driver_name, data_source_name, query, output_types, output_shapes, name=None, ctx=None):
  r"""This is the slowpath function for Eager mode.
  This is for function sql_dataset

  Runs the SqlDataset op through the generic `_execute.execute` path;
  used when the C fast path raises `_FallbackException`.
  """
  _ctx = ctx if ctx else _context.context()
  # Validate and canonicalize the list-valued attrs before execution.
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'sql_dataset' Op, not %r." % output_types)
  output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'sql_dataset' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  # Coerce each input to the dtype declared by the op definition.
  driver_name = _ops.convert_to_tensor(driver_name, _dtypes.string)
  data_source_name = _ops.convert_to_tensor(data_source_name, _dtypes.string)
  query = _ops.convert_to_tensor(query, _dtypes.string)
  _inputs_flat = [driver_name, data_source_name, query]
  _attrs = ("output_types", output_types, "output_shapes", output_shapes)
  _result = _execute.execute(b"SqlDataset", 1, inputs=_inputs_flat,
                             attrs=_attrs, ctx=_ctx, name=name)
  _execute.record_gradient(
      "SqlDataset", _inputs_flat, _attrs, _result, name)
  _result, = _result  # the op has exactly one output
  return _result
6046 
6047 
def stats_aggregator_handle(container="", shared_name="", name=None):
  r"""Creates a statistics manager resource.

  Args:
    container: An optional `string`. Defaults to `""`.
    shared_name: An optional `string`. Defaults to `""`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `resource`.
  """
  _ctx = _context._context
  # Graph mode (or eager context not yet initialized): build a graph op.
  if _ctx is None or not _ctx._eager_context.is_eager:
    # Apply attr defaults and canonicalize to strings.
    if container is None:
      container = ""
    container = _execute.make_str(container, "container")
    if shared_name is None:
      shared_name = ""
    shared_name = _execute.make_str(shared_name, "shared_name")
    _, _, _op = _op_def_lib._apply_op_helper(
        "StatsAggregatorHandle", container=container, shared_name=shared_name,
        name=name)
    _result = _op.outputs[:]
    _inputs_flat = _op.inputs
    # Read the attrs back off the created op for gradient recording.
    _attrs = ("container", _op.get_attr("container"), "shared_name",
              _op.get_attr("shared_name"))
    _execute.record_gradient(
      "StatsAggregatorHandle", _inputs_flat, _attrs, _result, name)
    _result, = _result  # single output
    return _result

  else:
    # Eager mode: try the C fast path; fall back to the Python slow path
    # when the fast path cannot handle the given inputs.
    try:
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._eager_context.device_name,
        "StatsAggregatorHandle", name, _ctx._post_execution_callbacks,
        "container", container, "shared_name", shared_name)
      return _result
    except _core._FallbackException:
      return stats_aggregator_handle_eager_fallback(
          container=container, shared_name=shared_name, name=name, ctx=_ctx)
    except _core._NotOkStatusException as e:
      # Include the op name in the raised error message to aid debugging.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
6095 
6096 
def stats_aggregator_handle_eager_fallback(container="", shared_name="", name=None, ctx=None):
  r"""This is the slowpath function for Eager mode.
  This is for function stats_aggregator_handle

  Runs the StatsAggregatorHandle op through the generic `_execute.execute`
  path; used when the C fast path raises `_FallbackException`.
  """
  _ctx = ctx if ctx else _context.context()
  # Apply attr defaults and canonicalize to strings.
  if container is None:
    container = ""
  container = _execute.make_str(container, "container")
  if shared_name is None:
    shared_name = ""
  shared_name = _execute.make_str(shared_name, "shared_name")
  _inputs_flat = []  # this op takes no tensor inputs
  _attrs = ("container", container, "shared_name", shared_name)
  _result = _execute.execute(b"StatsAggregatorHandle", 1, inputs=_inputs_flat,
                             attrs=_attrs, ctx=_ctx, name=name)
  _execute.record_gradient(
      "StatsAggregatorHandle", _inputs_flat, _attrs, _result, name)
  _result, = _result  # the op has exactly one output
  return _result
6116 
6117 
def stats_aggregator_summary(iterator, name=None):
  r"""Produces a summary of any statistics recorded by the given statistics manager.

  Args:
    iterator: A `Tensor` of type `resource`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `string`.
  """
  _ctx = _context._context
  # Graph mode (or eager context not yet initialized): build a graph op.
  if _ctx is None or not _ctx._eager_context.is_eager:
    _, _, _op = _op_def_lib._apply_op_helper(
        "StatsAggregatorSummary", iterator=iterator, name=name)
    _result = _op.outputs[:]
    _inputs_flat = _op.inputs
    _attrs = None  # StatsAggregatorSummary has no attrs
    _execute.record_gradient(
      "StatsAggregatorSummary", _inputs_flat, _attrs, _result, name)
    _result, = _result  # single output
    return _result

  else:
    # Eager mode: try the C fast path; fall back to the Python slow path
    # when the fast path cannot handle the given inputs.
    try:
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._eager_context.device_name,
        "StatsAggregatorSummary", name, _ctx._post_execution_callbacks,
        iterator)
      return _result
    except _core._FallbackException:
      return stats_aggregator_summary_eager_fallback(
          iterator, name=name, ctx=_ctx)
    except _core._NotOkStatusException as e:
      # Include the op name in the raised error message to aid debugging.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
6156 
6157 
def stats_aggregator_summary_eager_fallback(iterator, name=None, ctx=None):
  r"""This is the slowpath function for Eager mode.
  This is for function stats_aggregator_summary

  Runs the StatsAggregatorSummary op through the generic
  `_execute.execute` path; used when the C fast path raises
  `_FallbackException`.
  """
  _ctx = ctx if ctx else _context.context()
  # Coerce the input to the dtype declared by the op definition.
  iterator = _ops.convert_to_tensor(iterator, _dtypes.resource)
  _inputs_flat = [iterator]
  _attrs = None  # StatsAggregatorSummary has no attrs
  _result = _execute.execute(b"StatsAggregatorSummary", 1,
                             inputs=_inputs_flat, attrs=_attrs, ctx=_ctx,
                             name=name)
  _execute.record_gradient(
      "StatsAggregatorSummary", _inputs_flat, _attrs, _result, name)
  _result, = _result  # the op has exactly one output
  return _result
6173 
6174 
def tf_record_dataset(filenames, compression_type, buffer_size, name=None):
  r"""Creates a dataset that emits the records from one or more TFRecord files.

  Args:
    filenames: A `Tensor` of type `string`.
      A scalar or vector containing the name(s) of the file(s) to be
      read.
    compression_type: A `Tensor` of type `string`.
      A scalar containing either (i) the empty string (no
      compression), (ii) "ZLIB", or (iii) "GZIP".
    buffer_size: A `Tensor` of type `int64`.
      A scalar representing the number of bytes to buffer. A value of
      0 means no buffering will be performed.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `variant`.
  """
  _ctx = _context._context
  # Graph mode (or eager context not yet initialized): build a graph op.
  if _ctx is None or not _ctx._eager_context.is_eager:
    _, _, _op = _op_def_lib._apply_op_helper(
        "TFRecordDataset", filenames=filenames,
        compression_type=compression_type, buffer_size=buffer_size, name=name)
    _result = _op.outputs[:]
    _inputs_flat = _op.inputs
    _attrs = None  # TFRecordDataset has no attrs
    _execute.record_gradient(
      "TFRecordDataset", _inputs_flat, _attrs, _result, name)
    _result, = _result  # single output
    return _result

  else:
    # Eager mode: try the C fast path; fall back to the Python slow path
    # when the fast path cannot handle the given inputs.
    try:
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._eager_context.device_name,
        "TFRecordDataset", name, _ctx._post_execution_callbacks, filenames,
        compression_type, buffer_size)
      return _result
    except _core._FallbackException:
      return tf_record_dataset_eager_fallback(
          filenames, compression_type, buffer_size, name=name, ctx=_ctx)
    except _core._NotOkStatusException as e:
      # Include the op name in the raised error message to aid debugging.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
6222 
6223 
def tf_record_dataset_eager_fallback(filenames, compression_type, buffer_size, name=None, ctx=None):
  r"""This is the slowpath function for Eager mode.
  This is for function tf_record_dataset

  Runs the TFRecordDataset op through the generic `_execute.execute`
  path; used when the C fast path raises `_FallbackException`.
  """
  _ctx = ctx if ctx else _context.context()
  # Coerce each input to the dtype declared by the op definition.
  filenames = _ops.convert_to_tensor(filenames, _dtypes.string)
  compression_type = _ops.convert_to_tensor(compression_type, _dtypes.string)
  buffer_size = _ops.convert_to_tensor(buffer_size, _dtypes.int64)
  _inputs_flat = [filenames, compression_type, buffer_size]
  _attrs = None  # TFRecordDataset has no attrs
  _result = _execute.execute(b"TFRecordDataset", 1, inputs=_inputs_flat,
                             attrs=_attrs, ctx=_ctx, name=name)
  _execute.record_gradient(
      "TFRecordDataset", _inputs_flat, _attrs, _result, name)
  _result, = _result  # the op has exactly one output
  return _result
6240 
6241 
def take_dataset(input_dataset, count, output_types, output_shapes, name=None):
  r"""Creates a dataset that contains `count` elements from the `input_dataset`.

  Args:
    input_dataset: A `Tensor` of type `variant`.
    count: A `Tensor` of type `int64`.
      A scalar representing the number of elements from the `input_dataset`
      that should be taken. A value of `-1` indicates that all of `input_dataset`
      is taken.
    output_types: A list of `tf.DTypes` that has length `>= 1`.
    output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `variant`.
  """
  _ctx = _context._context
  # Graph mode (or eager context not yet initialized): build a graph op.
  if _ctx is None or not _ctx._eager_context.is_eager:
    # Validate and canonicalize the list-valued attrs.
    if not isinstance(output_types, (list, tuple)):
      raise TypeError(
          "Expected list for 'output_types' argument to "
          "'take_dataset' Op, not %r." % output_types)
    output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
    if not isinstance(output_shapes, (list, tuple)):
      raise TypeError(
          "Expected list for 'output_shapes' argument to "
          "'take_dataset' Op, not %r." % output_shapes)
    output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
    _, _, _op = _op_def_lib._apply_op_helper(
        "TakeDataset", input_dataset=input_dataset, count=count,
        output_types=output_types, output_shapes=output_shapes, name=name)
    _result = _op.outputs[:]
    _inputs_flat = _op.inputs
    # Read the attrs back off the created op for gradient recording.
    _attrs = ("output_types", _op.get_attr("output_types"), "output_shapes",
              _op.get_attr("output_shapes"))
    _execute.record_gradient(
      "TakeDataset", _inputs_flat, _attrs, _result, name)
    _result, = _result  # single output
    return _result

  else:
    # Eager mode: try the C fast path; fall back to the Python slow path
    # when the fast path cannot handle the given inputs.
    try:
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._eager_context.device_name, "TakeDataset",
        name, _ctx._post_execution_callbacks, input_dataset, count,
        "output_types", output_types, "output_shapes", output_shapes)
      return _result
    except _core._FallbackException:
      return take_dataset_eager_fallback(
          input_dataset, count, output_types=output_types,
          output_shapes=output_shapes, name=name, ctx=_ctx)
    except _core._NotOkStatusException as e:
      # Include the op name in the raised error message to aid debugging.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
6299 
6300 
def take_dataset_eager_fallback(input_dataset, count, output_types, output_shapes, name=None, ctx=None):
  r"""This is the slowpath function for Eager mode.
  This is for function take_dataset

  Runs the TakeDataset op through the generic `_execute.execute` path;
  used when the C fast path raises `_FallbackException`.
  """
  _ctx = ctx if ctx else _context.context()
  # Validate and canonicalize the list-valued attrs before execution.
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'take_dataset' Op, not %r." % output_types)
  output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'take_dataset' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  # Coerce each input to the dtype declared by the op definition.
  input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
  count = _ops.convert_to_tensor(count, _dtypes.int64)
  _inputs_flat = [input_dataset, count]
  _attrs = ("output_types", output_types, "output_shapes", output_shapes)
  _result = _execute.execute(b"TakeDataset", 1, inputs=_inputs_flat,
                             attrs=_attrs, ctx=_ctx, name=name)
  _execute.record_gradient(
      "TakeDataset", _inputs_flat, _attrs, _result, name)
  _result, = _result  # the op has exactly one output
  return _result
6326 
6327 
def tensor_dataset(components, output_shapes, name=None):
  r"""Creates a dataset that emits `components` as a tuple of tensors once.

  Args:
    components: A list of `Tensor` objects.
    output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `variant`.
  """
  _ctx = _context._context
  # Graph mode (or eager context not yet initialized): build a graph op.
  if _ctx is None or not _ctx._eager_context.is_eager:
    # Validate and canonicalize the list-valued attr.
    if not isinstance(output_shapes, (list, tuple)):
      raise TypeError(
          "Expected list for 'output_shapes' argument to "
          "'tensor_dataset' Op, not %r." % output_shapes)
    output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
    _, _, _op = _op_def_lib._apply_op_helper(
        "TensorDataset", components=components, output_shapes=output_shapes,
        name=name)
    _result = _op.outputs[:]
    _inputs_flat = _op.inputs
    # Toutput_types is inferred from `components` by the op helper; read
    # the attrs back for gradient recording.
    _attrs = ("Toutput_types", _op.get_attr("Toutput_types"), "output_shapes",
              _op.get_attr("output_shapes"))
    _execute.record_gradient(
      "TensorDataset", _inputs_flat, _attrs, _result, name)
    _result, = _result  # single output
    return _result

  else:
    # Eager mode: try the C fast path; fall back to the Python slow path
    # when the fast path cannot handle the given inputs.
    try:
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._eager_context.device_name,
        "TensorDataset", name, _ctx._post_execution_callbacks, components,
        "output_shapes", output_shapes)
      return _result
    except _core._FallbackException:
      return tensor_dataset_eager_fallback(
          components, output_shapes=output_shapes, name=name, ctx=_ctx)
    except _core._NotOkStatusException as e:
      # Include the op name in the raised error message to aid debugging.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
6374 
6375 
def tensor_dataset_eager_fallback(components, output_shapes, name=None, ctx=None):
  r"""Slow-path eager-mode implementation of tensor_dataset."""
  eager_ctx = ctx or _context.context()
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'tensor_dataset' Op, not %r." % output_shapes)
  shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  # Components may be of mixed dtypes; convert them and capture the dtypes
  # for the Toutput_types attr.
  attr_types, components = _execute.convert_to_mixed_eager_tensors(
      components, eager_ctx)
  flat_inputs = list(components)
  attrs = ("Toutput_types", attr_types, "output_shapes", shapes)
  results = _execute.execute(b"TensorDataset", 1, inputs=flat_inputs,
                             attrs=attrs, ctx=eager_ctx, name=name)
  _execute.record_gradient(
      "TensorDataset", flat_inputs, attrs, results, name)
  handle, = results
  return handle
6396 
6397 
def tensor_slice_dataset(components, output_shapes, name=None):
  r"""Creates a dataset that emits each dim-0 slice of `components` once.

  Args:
    components: A list of `Tensor` objects.
    output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `variant`.
  """
  ctx = _context._context
  if ctx is not None and ctx._eager_context.is_eager:
    # Eager mode: attempt the C fast path first; fall back to the Python
    # slow path when the fast path rejects the inputs.
    try:
      return _pywrap_tensorflow.TFE_Py_FastPathExecute(
        ctx._context_handle, ctx._eager_context.device_name,
        "TensorSliceDataset", name, ctx._post_execution_callbacks,
        components, "output_shapes", output_shapes)
    except _core._FallbackException:
      return tensor_slice_dataset_eager_fallback(
          components, output_shapes=output_shapes, name=name, ctx=ctx)
    except _core._NotOkStatusException as e:
      message = e.message if name is None else e.message + " name: " + name
      _six.raise_from(_core._status_to_exception(e.code, message), None)
  # Graph mode: validate list-valued attrs, build the op, record gradient.
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'tensor_slice_dataset' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  _, _, op = _op_def_lib._apply_op_helper(
      "TensorSliceDataset", components=components,
      output_shapes=output_shapes, name=name)
  results = op.outputs[:]
  attrs = ("Toutput_types", op.get_attr("Toutput_types"), "output_shapes",
           op.get_attr("output_shapes"))
  _execute.record_gradient(
      "TensorSliceDataset", op.inputs, attrs, results, name)
  handle, = results
  return handle
6444 
6445 
def tensor_slice_dataset_eager_fallback(components, output_shapes, name=None, ctx=None):
  r"""Slow-path eager-mode implementation of tensor_slice_dataset."""
  eager_ctx = ctx or _context.context()
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'tensor_slice_dataset' Op, not %r." % output_shapes)
  shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  # Components may be of mixed dtypes; convert them and capture the dtypes
  # for the Toutput_types attr.
  attr_types, components = _execute.convert_to_mixed_eager_tensors(
      components, eager_ctx)
  flat_inputs = list(components)
  attrs = ("Toutput_types", attr_types, "output_shapes", shapes)
  results = _execute.execute(b"TensorSliceDataset", 1, inputs=flat_inputs,
                             attrs=attrs, ctx=eager_ctx, name=name)
  _execute.record_gradient(
      "TensorSliceDataset", flat_inputs, attrs, results, name)
  handle, = results
  return handle
6466 
6467 
def text_line_dataset(filenames, compression_type, buffer_size, name=None):
  r"""Creates a dataset that emits the lines of one or more text files.

  Args:
    filenames: A `Tensor` of type `string`.
      A scalar or a vector containing the name(s) of the file(s) to be
      read.
    compression_type: A `Tensor` of type `string`.
      A scalar containing either (i) the empty string (no
      compression), (ii) "ZLIB", or (iii) "GZIP".
    buffer_size: A `Tensor` of type `int64`.
      A scalar containing the number of bytes to buffer.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `variant`.
  """
  ctx = _context._context
  if ctx is not None and ctx._eager_context.is_eager:
    # Eager mode: attempt the C fast path first; fall back to the Python
    # slow path when the fast path rejects the inputs.
    try:
      return _pywrap_tensorflow.TFE_Py_FastPathExecute(
        ctx._context_handle, ctx._eager_context.device_name,
        "TextLineDataset", name, ctx._post_execution_callbacks, filenames,
        compression_type, buffer_size)
    except _core._FallbackException:
      return text_line_dataset_eager_fallback(
          filenames, compression_type, buffer_size, name=name, ctx=ctx)
    except _core._NotOkStatusException as e:
      message = e.message if name is None else e.message + " name: " + name
      _six.raise_from(_core._status_to_exception(e.code, message), None)
  # Graph mode: this op has no list-valued attrs to validate.
  _, _, op = _op_def_lib._apply_op_helper(
      "TextLineDataset", filenames=filenames,
      compression_type=compression_type, buffer_size=buffer_size, name=name)
  results = op.outputs[:]
  _execute.record_gradient(
      "TextLineDataset", op.inputs, None, results, name)
  handle, = results
  return handle
6514 
6515 
def text_line_dataset_eager_fallback(filenames, compression_type, buffer_size, name=None, ctx=None):
  r"""Slow-path eager-mode implementation of text_line_dataset."""
  eager_ctx = ctx or _context.context()
  # Coerce each input to the dtype declared by the op definition.
  flat_inputs = [
      _ops.convert_to_tensor(filenames, _dtypes.string),
      _ops.convert_to_tensor(compression_type, _dtypes.string),
      _ops.convert_to_tensor(buffer_size, _dtypes.int64),
  ]
  results = _execute.execute(b"TextLineDataset", 1, inputs=flat_inputs,
                             attrs=None, ctx=eager_ctx, name=name)
  _execute.record_gradient(
      "TextLineDataset", flat_inputs, None, results, name)
  handle, = results
  return handle
6532 
6533 
def unbatch_dataset(input_dataset, output_types, output_shapes, name=None):
  r"""A dataset that splits the elements of its input into multiple elements.

  Args:
    input_dataset: A `Tensor` of type `variant`.
    output_types: A list of `tf.DTypes` that has length `>= 1`.
    output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `variant`.
  """
  ctx = _context._context
  if ctx is not None and ctx._eager_context.is_eager:
    # Eager mode: attempt the C fast path first; fall back to the Python
    # slow path when the fast path rejects the inputs.
    try:
      return _pywrap_tensorflow.TFE_Py_FastPathExecute(
        ctx._context_handle, ctx._eager_context.device_name,
        "UnbatchDataset", name, ctx._post_execution_callbacks, input_dataset,
        "output_types", output_types, "output_shapes", output_shapes)
    except _core._FallbackException:
      return unbatch_dataset_eager_fallback(
          input_dataset, output_types=output_types,
          output_shapes=output_shapes, name=name, ctx=ctx)
    except _core._NotOkStatusException as e:
      message = e.message if name is None else e.message + " name: " + name
      _six.raise_from(_core._status_to_exception(e.code, message), None)
  # Graph mode: validate list-valued attrs, build the op, record gradient.
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'unbatch_dataset' Op, not %r." % output_types)
  output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'unbatch_dataset' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  _, _, op = _op_def_lib._apply_op_helper(
      "UnbatchDataset", input_dataset=input_dataset,
      output_types=output_types, output_shapes=output_shapes, name=name)
  results = op.outputs[:]
  attrs = ("output_types", op.get_attr("output_types"), "output_shapes",
           op.get_attr("output_shapes"))
  _execute.record_gradient(
      "UnbatchDataset", op.inputs, attrs, results, name)
  handle, = results
  return handle
6587 
6588 
def unbatch_dataset_eager_fallback(input_dataset, output_types, output_shapes, name=None, ctx=None):
  r"""Slow-path eager-mode implementation of unbatch_dataset."""
  eager_ctx = ctx or _context.context()
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'unbatch_dataset' Op, not %r." % output_types)
  types = [_execute.make_type(_t, "output_types") for _t in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'unbatch_dataset' Op, not %r." % output_shapes)
  shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  flat_inputs = [_ops.convert_to_tensor(input_dataset, _dtypes.variant)]
  attrs = ("output_types", types, "output_shapes", shapes)
  results = _execute.execute(b"UnbatchDataset", 1, inputs=flat_inputs,
                             attrs=attrs, ctx=eager_ctx, name=name)
  _execute.record_gradient(
      "UnbatchDataset", flat_inputs, attrs, results, name)
  handle, = results
  return handle
6613 
6614 
def window_dataset(input_dataset, size, shift, stride, drop_remainder, output_types, output_shapes, name=None):
  r"""A dataset that creates window datasets from the input dataset.

  Args:
    input_dataset: A `Tensor` of type `variant`.
    size: A `Tensor` of type `int64`.
      A scalar representing the number of elements to accumulate in a window.
    shift: A `Tensor` of type `int64`.
      A scalar representing the steps moving the sliding window forward in one
      iteration. It must be positive.
    stride: A `Tensor` of type `int64`.
      A scalar representing the stride of the input elements of the sliding window.
      It must be positive.
    drop_remainder: A `Tensor` of type `bool`.
      A scalar representing whether a window should be dropped in case its size is
      smaller than desired.
    output_types: A list of `tf.DTypes` that has length `>= 1`.
    output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `variant`.
  """
  ctx = _context._context
  if ctx is not None and ctx._eager_context.is_eager:
    # Eager mode: attempt the C fast path first; fall back to the Python
    # slow path when the fast path rejects the inputs.
    try:
      return _pywrap_tensorflow.TFE_Py_FastPathExecute(
        ctx._context_handle, ctx._eager_context.device_name,
        "WindowDataset", name, ctx._post_execution_callbacks, input_dataset,
        size, shift, stride, drop_remainder, "output_types", output_types,
        "output_shapes", output_shapes)
    except _core._FallbackException:
      return window_dataset_eager_fallback(
          input_dataset, size, shift, stride, drop_remainder,
          output_types=output_types, output_shapes=output_shapes, name=name,
          ctx=ctx)
    except _core._NotOkStatusException as e:
      message = e.message if name is None else e.message + " name: " + name
      _six.raise_from(_core._status_to_exception(e.code, message), None)
  # Graph mode: validate list-valued attrs, build the op, record gradient.
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'window_dataset' Op, not %r." % output_types)
  output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'window_dataset' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  _, _, op = _op_def_lib._apply_op_helper(
      "WindowDataset", input_dataset=input_dataset, size=size, shift=shift,
      stride=stride, drop_remainder=drop_remainder,
      output_types=output_types, output_shapes=output_shapes, name=name)
  results = op.outputs[:]
  attrs = ("output_types", op.get_attr("output_types"), "output_shapes",
           op.get_attr("output_shapes"))
  _execute.record_gradient(
      "WindowDataset", op.inputs, attrs, results, name)
  handle, = results
  return handle
6682 
6683 
def window_dataset_eager_fallback(input_dataset, size, shift, stride, drop_remainder, output_types, output_shapes, name=None, ctx=None):
  r"""Slow-path eager-mode implementation of window_dataset."""
  eager_ctx = ctx or _context.context()
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'window_dataset' Op, not %r." % output_types)
  types = [_execute.make_type(_t, "output_types") for _t in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'window_dataset' Op, not %r." % output_shapes)
  shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  # Coerce each input to the dtype declared by the op definition.
  flat_inputs = [
      _ops.convert_to_tensor(input_dataset, _dtypes.variant),
      _ops.convert_to_tensor(size, _dtypes.int64),
      _ops.convert_to_tensor(shift, _dtypes.int64),
      _ops.convert_to_tensor(stride, _dtypes.int64),
      _ops.convert_to_tensor(drop_remainder, _dtypes.bool),
  ]
  attrs = ("output_types", types, "output_shapes", shapes)
  results = _execute.execute(b"WindowDataset", 1, inputs=flat_inputs,
                             attrs=attrs, ctx=eager_ctx, name=name)
  _execute.record_gradient(
      "WindowDataset", flat_inputs, attrs, results, name)
  handle, = results
  return handle
6712 
6713 
def zip_dataset(input_datasets, output_types, output_shapes, name=None):
  r"""Creates a dataset that zips together `input_datasets`.

  Args:
    input_datasets: A list of at least 1 `Tensor` objects with type `variant`.
    output_types: A list of `tf.DTypes` that has length `>= 1`.
    output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `variant`.
  """
  ctx = _context._context
  if ctx is not None and ctx._eager_context.is_eager:
    # Eager mode: attempt the C fast path first; fall back to the Python
    # slow path when the fast path rejects the inputs.
    try:
      return _pywrap_tensorflow.TFE_Py_FastPathExecute(
        ctx._context_handle, ctx._eager_context.device_name, "ZipDataset",
        name, ctx._post_execution_callbacks, input_datasets, "output_types",
        output_types, "output_shapes", output_shapes)
    except _core._FallbackException:
      return zip_dataset_eager_fallback(
          input_datasets, output_types=output_types,
          output_shapes=output_shapes, name=name, ctx=ctx)
    except _core._NotOkStatusException as e:
      message = e.message if name is None else e.message + " name: " + name
      _six.raise_from(_core._status_to_exception(e.code, message), None)
  # Graph mode: validate list-valued attrs, build the op, record gradient.
  # The N attr is inferred from input_datasets by _apply_op_helper.
  if not isinstance(input_datasets, (list, tuple)):
    raise TypeError(
        "Expected list for 'input_datasets' argument to "
        "'zip_dataset' Op, not %r." % input_datasets)
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'zip_dataset' Op, not %r." % output_types)
  output_types = [_execute.make_type(_t, "output_types") for _t in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'zip_dataset' Op, not %r." % output_shapes)
  output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  _, _, op = _op_def_lib._apply_op_helper(
      "ZipDataset", input_datasets=input_datasets,
      output_types=output_types, output_shapes=output_shapes, name=name)
  results = op.outputs[:]
  attrs = ("output_types", op.get_attr("output_types"), "output_shapes",
           op.get_attr("output_shapes"), "N", op.get_attr("N"))
  _execute.record_gradient(
      "ZipDataset", op.inputs, attrs, results, name)
  handle, = results
  return handle
6772 
6773 
def zip_dataset_eager_fallback(input_datasets, output_types, output_shapes, name=None, ctx=None):
  r"""Slow-path eager-mode implementation of zip_dataset."""
  eager_ctx = ctx or _context.context()
  if not isinstance(input_datasets, (list, tuple)):
    raise TypeError(
        "Expected list for 'input_datasets' argument to "
        "'zip_dataset' Op, not %r." % input_datasets)
  num_datasets = len(input_datasets)
  if not isinstance(output_types, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_types' argument to "
        "'zip_dataset' Op, not %r." % output_types)
  types = [_execute.make_type(_t, "output_types") for _t in output_types]
  if not isinstance(output_shapes, (list, tuple)):
    raise TypeError(
        "Expected list for 'output_shapes' argument to "
        "'zip_dataset' Op, not %r." % output_shapes)
  shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes]
  flat_inputs = list(_ops.convert_n_to_tensor(input_datasets, _dtypes.variant))
  attrs = ("output_types", types, "output_shapes", shapes, "N", num_datasets)
  results = _execute.execute(b"ZipDataset", 1, inputs=flat_inputs,
                             attrs=attrs, ctx=eager_ctx, name=name)
  _execute.record_gradient(
      "ZipDataset", flat_inputs, attrs, results, name)
  handle, = results
  return handle
6804 
def _InitOpDefLibrary(op_list_proto_bytes):
  """Parse a serialized OpList, register it, and return an OpDefLibrary."""
  parsed_ops = _op_def_pb2.OpList()
  parsed_ops.ParseFromString(op_list_proto_bytes)
  _op_def_registry.register_op_list(parsed_ops)
  library = _op_def_library.OpDefLibrary()
  library.add_op_list(parsed_ops)
  return library
6812 # op {
6813 #   name: "AnonymousIterator"
6814 #   output_arg {
6815 #     name: "handle"
6816 #     type: DT_RESOURCE
6817 #   }
6818 #   attr {
6819 #     name: "output_types"
6820 #     type: "list(type)"
6821 #     has_minimum: true
6822 #     minimum: 1
6823 #   }
6824 #   attr {
6825 #     name: "output_shapes"
6826 #     type: "list(shape)"
6827 #     has_minimum: true
6828 #     minimum: 1
6829 #   }
6830 #   is_stateful: true
6831 # }
6832 # op {
6833 #   name: "BatchDataset"
6834 #   input_arg {
6835 #     name: "input_dataset"
6836 #     type: DT_VARIANT
6837 #   }
6838 #   input_arg {
6839 #     name: "batch_size"
6840 #     type: DT_INT64
6841 #   }
6842 #   output_arg {
6843 #     name: "handle"
6844 #     type: DT_VARIANT
6845 #   }
6846 #   attr {
6847 #     name: "output_types"
6848 #     type: "list(type)"
6849 #     has_minimum: true
6850 #     minimum: 1
6851 #   }
6852 #   attr {
6853 #     name: "output_shapes"
6854 #     type: "list(shape)"
6855 #     has_minimum: true
6856 #     minimum: 1
6857 #   }
6858 # }
6859 # op {
6860 #   name: "BatchDatasetV2"
6861 #   input_arg {
6862 #     name: "input_dataset"
6863 #     type: DT_VARIANT
6864 #   }
6865 #   input_arg {
6866 #     name: "batch_size"
6867 #     type: DT_INT64
6868 #   }
6869 #   input_arg {
6870 #     name: "drop_remainder"
6871 #     type: DT_BOOL
6872 #   }
6873 #   output_arg {
6874 #     name: "handle"
6875 #     type: DT_VARIANT
6876 #   }
6877 #   attr {
6878 #     name: "output_types"
6879 #     type: "list(type)"
6880 #     has_minimum: true
6881 #     minimum: 1
6882 #   }
6883 #   attr {
6884 #     name: "output_shapes"
6885 #     type: "list(shape)"
6886 #     has_minimum: true
6887 #     minimum: 1
6888 #   }
6889 # }
6890 # op {
6891 #   name: "BytesProducedStatsDataset"
6892 #   input_arg {
6893 #     name: "input_dataset"
6894 #     type: DT_VARIANT
6895 #   }
6896 #   input_arg {
6897 #     name: "tag"
6898 #     type: DT_STRING
6899 #   }
6900 #   output_arg {
6901 #     name: "handle"
6902 #     type: DT_VARIANT
6903 #   }
6904 #   attr {
6905 #     name: "output_types"
6906 #     type: "list(type)"
6907 #     has_minimum: true
6908 #     minimum: 1
6909 #   }
6910 #   attr {
6911 #     name: "output_shapes"
6912 #     type: "list(shape)"
6913 #     has_minimum: true
6914 #     minimum: 1
6915 #   }
6916 # }
6917 # op {
6918 #   name: "CacheDataset"
6919 #   input_arg {
6920 #     name: "input_dataset"
6921 #     type: DT_VARIANT
6922 #   }
6923 #   input_arg {
6924 #     name: "filename"
6925 #     type: DT_STRING
6926 #   }
6927 #   output_arg {
6928 #     name: "handle"
6929 #     type: DT_VARIANT
6930 #   }
6931 #   attr {
6932 #     name: "output_types"
6933 #     type: "list(type)"
6934 #     has_minimum: true
6935 #     minimum: 1
6936 #   }
6937 #   attr {
6938 #     name: "output_shapes"
6939 #     type: "list(shape)"
6940 #     has_minimum: true
6941 #     minimum: 1
6942 #   }
6943 # }
6944 # op {
6945 #   name: "ConcatenateDataset"
6946 #   input_arg {
6947 #     name: "input_dataset"
6948 #     type: DT_VARIANT
6949 #   }
6950 #   input_arg {
6951 #     name: "another_dataset"
6952 #     type: DT_VARIANT
6953 #   }
6954 #   output_arg {
6955 #     name: "handle"
6956 #     type: DT_VARIANT
6957 #   }
6958 #   attr {
6959 #     name: "output_types"
6960 #     type: "list(type)"
6961 #     has_minimum: true
6962 #     minimum: 1
6963 #   }
6964 #   attr {
6965 #     name: "output_shapes"
6966 #     type: "list(shape)"
6967 #     has_minimum: true
6968 #     minimum: 1
6969 #   }
6970 # }
6971 # op {
6972 #   name: "DatasetToGraph"
6973 #   input_arg {
6974 #     name: "input_dataset"
6975 #     type: DT_VARIANT
6976 #   }
6977 #   output_arg {
6978 #     name: "graph"
6979 #     type: DT_STRING
6980 #   }
6981 # }
6982 # op {
6983 #   name: "DatasetToSingleElement"
6984 #   input_arg {
6985 #     name: "dataset"
6986 #     type: DT_VARIANT
6987 #   }
6988 #   output_arg {
6989 #     name: "components"
6990 #     type_list_attr: "output_types"
6991 #   }
6992 #   attr {
6993 #     name: "output_types"
6994 #     type: "list(type)"
6995 #     has_minimum: true
6996 #     minimum: 1
6997 #   }
6998 #   attr {
6999 #     name: "output_shapes"
7000 #     type: "list(shape)"
7001 #     has_minimum: true
7002 #     minimum: 1
7003 #   }
7004 # }
7005 # op {
7006 #   name: "DatasetToTFRecord"
7007 #   input_arg {
7008 #     name: "input_dataset"
7009 #     type: DT_VARIANT
7010 #   }
7011 #   input_arg {
7012 #     name: "filename"
7013 #     type: DT_STRING
7014 #   }
7015 #   input_arg {
7016 #     name: "compression_type"
7017 #     type: DT_STRING
7018 #   }
7019 # }
7020 # op {
7021 #   name: "DenseToSparseBatchDataset"
7022 #   input_arg {
7023 #     name: "input_dataset"
7024 #     type: DT_VARIANT
7025 #   }
7026 #   input_arg {
7027 #     name: "batch_size"
7028 #     type: DT_INT64
7029 #   }
7030 #   input_arg {
7031 #     name: "row_shape"
7032 #     type: DT_INT64
7033 #   }
7034 #   output_arg {
7035 #     name: "handle"
7036 #     type: DT_VARIANT
7037 #   }
7038 #   attr {
7039 #     name: "output_types"
7040 #     type: "list(type)"
7041 #     has_minimum: true
7042 #     minimum: 1
7043 #   }
7044 #   attr {
7045 #     name: "output_shapes"
7046 #     type: "list(shape)"
7047 #     has_minimum: true
7048 #     minimum: 1
7049 #   }
7050 # }
7051 # op {
7052 #   name: "DeserializeIterator"
7053 #   input_arg {
7054 #     name: "resource_handle"
7055 #     type: DT_RESOURCE
7056 #   }
7057 #   input_arg {
7058 #     name: "serialized"
7059 #     type: DT_VARIANT
7060 #   }
7061 #   is_stateful: true
7062 # }
7063 # op {
7064 #   name: "EnqueueInQueueDataset"
7065 #   input_arg {
7066 #     name: "queue"
7067 #     type: DT_VARIANT
7068 #   }
7069 #   input_arg {
7070 #     name: "components"
7071 #     type_list_attr: "Tcomponents"
7072 #   }
7073 #   attr {
7074 #     name: "Tcomponents"
7075 #     type: "list(type)"
7076 #     has_minimum: true
7077 #     minimum: 1
7078 #   }
7079 #   is_stateful: true
7080 # }
7081 # op {
7082 #   name: "FilterByLastComponentDataset"
7083 #   input_arg {
7084 #     name: "input_dataset"
7085 #     type: DT_VARIANT
7086 #   }
7087 #   output_arg {
7088 #     name: "output"
7089 #     type: DT_VARIANT
7090 #   }
7091 #   attr {
7092 #     name: "output_types"
7093 #     type: "list(type)"
7094 #     has_minimum: true
7095 #     minimum: 1
7096 #   }
7097 #   attr {
7098 #     name: "output_shapes"
7099 #     type: "list(shape)"
7100 #     has_minimum: true
7101 #     minimum: 1
7102 #   }
7103 # }
7104 # op {
7105 #   name: "FilterDataset"
7106 #   input_arg {
7107 #     name: "input_dataset"
7108 #     type: DT_VARIANT
7109 #   }
7110 #   input_arg {
7111 #     name: "other_arguments"
7112 #     type_list_attr: "Targuments"
7113 #   }
7114 #   output_arg {
7115 #     name: "handle"
7116 #     type: DT_VARIANT
7117 #   }
7118 #   attr {
7119 #     name: "predicate"
7120 #     type: "func"
7121 #   }
7122 #   attr {
7123 #     name: "Targuments"
7124 #     type: "list(type)"
7125 #     has_minimum: true
7126 #   }
7127 #   attr {
7128 #     name: "output_types"
7129 #     type: "list(type)"
7130 #     has_minimum: true
7131 #     minimum: 1
7132 #   }
7133 #   attr {
7134 #     name: "output_shapes"
7135 #     type: "list(shape)"
7136 #     has_minimum: true
7137 #     minimum: 1
7138 #   }
7139 # }
7140 # op {
7141 #   name: "FixedLengthRecordDataset"
7142 #   input_arg {
7143 #     name: "filenames"
7144 #     type: DT_STRING
7145 #   }
7146 #   input_arg {
7147 #     name: "header_bytes"
7148 #     type: DT_INT64
7149 #   }
7150 #   input_arg {
7151 #     name: "record_bytes"
7152 #     type: DT_INT64
7153 #   }
7154 #   input_arg {
7155 #     name: "footer_bytes"
7156 #     type: DT_INT64
7157 #   }
7158 #   input_arg {
7159 #     name: "buffer_size"
7160 #     type: DT_INT64
7161 #   }
7162 #   output_arg {
7163 #     name: "handle"
7164 #     type: DT_VARIANT
7165 #   }
7166 #   is_stateful: true
7167 # }
7168 # op {
7169 #   name: "FlatMapDataset"
7170 #   input_arg {
7171 #     name: "input_dataset"
7172 #     type: DT_VARIANT
7173 #   }
7174 #   input_arg {
7175 #     name: "other_arguments"
7176 #     type_list_attr: "Targuments"
7177 #   }
7178 #   output_arg {
7179 #     name: "handle"
7180 #     type: DT_VARIANT
7181 #   }
7182 #   attr {
7183 #     name: "f"
7184 #     type: "func"
7185 #   }
7186 #   attr {
7187 #     name: "Targuments"
7188 #     type: "list(type)"
7189 #     has_minimum: true
7190 #   }
7191 #   attr {
7192 #     name: "output_types"
7193 #     type: "list(type)"
7194 #     has_minimum: true
7195 #     minimum: 1
7196 #   }
7197 #   attr {
7198 #     name: "output_shapes"
7199 #     type: "list(shape)"
7200 #     has_minimum: true
7201 #     minimum: 1
7202 #   }
7203 # }
7204 # op {
7205 #   name: "GeneratorDataset"
7206 #   input_arg {
7207 #     name: "init_func_other_args"
7208 #     type_list_attr: "Tinit_func_args"
7209 #   }
7210 #   input_arg {
7211 #     name: "next_func_other_args"
7212 #     type_list_attr: "Tnext_func_args"
7213 #   }
7214 #   input_arg {
7215 #     name: "finalize_func_other_args"
7216 #     type_list_attr: "Tfinalize_func_args"
7217 #   }
7218 #   output_arg {
7219 #     name: "handle"
7220 #     type: DT_VARIANT
7221 #   }
7222 #   attr {
7223 #     name: "init_func"
7224 #     type: "func"
7225 #   }
7226 #   attr {
7227 #     name: "next_func"
7228 #     type: "func"
7229 #   }
7230 #   attr {
7231 #     name: "finalize_func"
7232 #     type: "func"
7233 #   }
7234 #   attr {
7235 #     name: "Tinit_func_args"
7236 #     type: "list(type)"
7237 #     has_minimum: true
7238 #   }
7239 #   attr {
7240 #     name: "Tnext_func_args"
7241 #     type: "list(type)"
7242 #     has_minimum: true
7243 #   }
7244 #   attr {
7245 #     name: "Tfinalize_func_args"
7246 #     type: "list(type)"
7247 #     has_minimum: true
7248 #   }
7249 #   attr {
7250 #     name: "output_types"
7251 #     type: "list(type)"
7252 #     has_minimum: true
7253 #     minimum: 1
7254 #   }
7255 #   attr {
7256 #     name: "output_shapes"
7257 #     type: "list(shape)"
7258 #     has_minimum: true
7259 #     minimum: 1
7260 #   }
7261 #   is_stateful: true
7262 # }
7263 # op {
7264 #   name: "GroupByReducerDataset"
7265 #   input_arg {
7266 #     name: "input_dataset"
7267 #     type: DT_VARIANT
7268 #   }
7269 #   input_arg {
7270 #     name: "key_func_other_arguments"
7271 #     type_list_attr: "Tkey_func_other_arguments"
7272 #   }
7273 #   input_arg {
7274 #     name: "init_func_other_arguments"
7275 #     type_list_attr: "Tinit_func_other_arguments"
7276 #   }
7277 #   input_arg {
7278 #     name: "reduce_func_other_arguments"
7279 #     type_list_attr: "Treduce_func_other_arguments"
7280 #   }
7281 #   input_arg {
7282 #     name: "finalize_func_other_arguments"
7283 #     type_list_attr: "Tfinalize_func_other_arguments"
7284 #   }
7285 #   output_arg {
7286 #     name: "handle"
7287 #     type: DT_VARIANT
7288 #   }
7289 #   attr {
7290 #     name: "key_func"
7291 #     type: "func"
7292 #   }
7293 #   attr {
7294 #     name: "init_func"
7295 #     type: "func"
7296 #   }
7297 #   attr {
7298 #     name: "reduce_func"
7299 #     type: "func"
7300 #   }
7301 #   attr {
7302 #     name: "finalize_func"
7303 #     type: "func"
7304 #   }
7305 #   attr {
7306 #     name: "Tkey_func_other_arguments"
7307 #     type: "list(type)"
7308 #     has_minimum: true
7309 #   }
7310 #   attr {
7311 #     name: "Tinit_func_other_arguments"
7312 #     type: "list(type)"
7313 #     has_minimum: true
7314 #   }
7315 #   attr {
7316 #     name: "Treduce_func_other_arguments"
7317 #     type: "list(type)"
7318 #     has_minimum: true
7319 #   }
7320 #   attr {
7321 #     name: "Tfinalize_func_other_arguments"
7322 #     type: "list(type)"
7323 #     has_minimum: true
7324 #   }
7325 #   attr {
7326 #     name: "output_types"
7327 #     type: "list(type)"
7328 #     has_minimum: true
7329 #     minimum: 1
7330 #   }
7331 #   attr {
7332 #     name: "output_shapes"
7333 #     type: "list(shape)"
7334 #     has_minimum: true
7335 #     minimum: 1
7336 #   }
7337 #   is_stateful: true
7338 # }
7339 # op {
7340 #   name: "GroupByWindowDataset"
7341 #   input_arg {
7342 #     name: "input_dataset"
7343 #     type: DT_VARIANT
7344 #   }
7345 #   input_arg {
7346 #     name: "key_func_other_arguments"
7347 #     type_list_attr: "Tkey_func_other_arguments"
7348 #   }
7349 #   input_arg {
7350 #     name: "reduce_func_other_arguments"
7351 #     type_list_attr: "Treduce_func_other_arguments"
7352 #   }
7353 #   input_arg {
7354 #     name: "window_size_func_other_arguments"
7355 #     type_list_attr: "Twindow_size_func_other_arguments"
7356 #   }
7357 #   output_arg {
7358 #     name: "handle"
7359 #     type: DT_VARIANT
7360 #   }
7361 #   attr {
7362 #     name: "key_func"
7363 #     type: "func"
7364 #   }
7365 #   attr {
7366 #     name: "reduce_func"
7367 #     type: "func"
7368 #   }
7369 #   attr {
7370 #     name: "window_size_func"
7371 #     type: "func"
7372 #   }
7373 #   attr {
7374 #     name: "Tkey_func_other_arguments"
7375 #     type: "list(type)"
7376 #     has_minimum: true
7377 #   }
7378 #   attr {
7379 #     name: "Treduce_func_other_arguments"
7380 #     type: "list(type)"
7381 #     has_minimum: true
7382 #   }
7383 #   attr {
7384 #     name: "Twindow_size_func_other_arguments"
7385 #     type: "list(type)"
7386 #     has_minimum: true
7387 #   }
7388 #   attr {
7389 #     name: "output_types"
7390 #     type: "list(type)"
7391 #     has_minimum: true
7392 #     minimum: 1
7393 #   }
7394 #   attr {
7395 #     name: "output_shapes"
7396 #     type: "list(shape)"
7397 #     has_minimum: true
7398 #     minimum: 1
7399 #   }
7400 # }
7401 # op {
7402 #   name: "InterleaveDataset"
7403 #   input_arg {
7404 #     name: "input_dataset"
7405 #     type: DT_VARIANT
7406 #   }
7407 #   input_arg {
7408 #     name: "other_arguments"
7409 #     type_list_attr: "Targuments"
7410 #   }
7411 #   input_arg {
7412 #     name: "cycle_length"
7413 #     type: DT_INT64
7414 #   }
7415 #   input_arg {
7416 #     name: "block_length"
7417 #     type: DT_INT64
7418 #   }
7419 #   output_arg {
7420 #     name: "handle"
7421 #     type: DT_VARIANT
7422 #   }
7423 #   attr {
7424 #     name: "f"
7425 #     type: "func"
7426 #   }
7427 #   attr {
7428 #     name: "Targuments"
7429 #     type: "list(type)"
7430 #     has_minimum: true
7431 #   }
7432 #   attr {
7433 #     name: "output_types"
7434 #     type: "list(type)"
7435 #     has_minimum: true
7436 #     minimum: 1
7437 #   }
7438 #   attr {
7439 #     name: "output_shapes"
7440 #     type: "list(shape)"
7441 #     has_minimum: true
7442 #     minimum: 1
7443 #   }
7444 # }
7445 # op {
7446 #   name: "Iterator"
7447 #   output_arg {
7448 #     name: "handle"
7449 #     type: DT_RESOURCE
7450 #   }
7451 #   attr {
7452 #     name: "shared_name"
7453 #     type: "string"
7454 #   }
7455 #   attr {
7456 #     name: "container"
7457 #     type: "string"
7458 #   }
7459 #   attr {
7460 #     name: "output_types"
7461 #     type: "list(type)"
7462 #     has_minimum: true
7463 #     minimum: 1
7464 #   }
7465 #   attr {
7466 #     name: "output_shapes"
7467 #     type: "list(shape)"
7468 #     has_minimum: true
7469 #     minimum: 1
7470 #   }
7471 #   is_stateful: true
7472 # }
7473 # op {
7474 #   name: "IteratorFromStringHandle"
7475 #   input_arg {
7476 #     name: "string_handle"
7477 #     type: DT_STRING
7478 #   }
7479 #   output_arg {
7480 #     name: "resource_handle"
7481 #     type: DT_RESOURCE
7482 #   }
7483 #   attr {
7484 #     name: "output_types"
7485 #     type: "list(type)"
7486 #     default_value {
7487 #       list {
7488 #       }
7489 #     }
7490 #     has_minimum: true
7491 #   }
7492 #   attr {
7493 #     name: "output_shapes"
7494 #     type: "list(shape)"
7495 #     default_value {
7496 #       list {
7497 #       }
7498 #     }
7499 #     has_minimum: true
7500 #   }
7501 #   is_stateful: true
7502 # }
7503 # op {
7504 #   name: "IteratorFromStringHandleV2"
7505 #   input_arg {
7506 #     name: "string_handle"
7507 #     type: DT_STRING
7508 #   }
7509 #   output_arg {
7510 #     name: "resource_handle"
7511 #     type: DT_RESOURCE
7512 #   }
7513 #   attr {
7514 #     name: "output_types"
7515 #     type: "list(type)"
7516 #     default_value {
7517 #       list {
7518 #       }
7519 #     }
7520 #     has_minimum: true
7521 #   }
7522 #   attr {
7523 #     name: "output_shapes"
7524 #     type: "list(shape)"
7525 #     default_value {
7526 #       list {
7527 #       }
7528 #     }
7529 #     has_minimum: true
7530 #   }
7531 #   is_stateful: true
7532 # }
7533 # op {
7534 #   name: "IteratorGetNext"
7535 #   input_arg {
7536 #     name: "iterator"
7537 #     type: DT_RESOURCE
7538 #   }
7539 #   output_arg {
7540 #     name: "components"
7541 #     type_list_attr: "output_types"
7542 #   }
7543 #   attr {
7544 #     name: "output_types"
7545 #     type: "list(type)"
7546 #     has_minimum: true
7547 #     minimum: 1
7548 #   }
7549 #   attr {
7550 #     name: "output_shapes"
7551 #     type: "list(shape)"
7552 #     has_minimum: true
7553 #     minimum: 1
7554 #   }
7555 #   is_stateful: true
7556 # }
7557 # op {
7558 #   name: "IteratorGetNextAsOptional"
7559 #   input_arg {
7560 #     name: "iterator"
7561 #     type: DT_RESOURCE
7562 #   }
7563 #   output_arg {
7564 #     name: "optional"
7565 #     type: DT_VARIANT
7566 #   }
7567 #   attr {
7568 #     name: "output_types"
7569 #     type: "list(type)"
7570 #     has_minimum: true
7571 #     minimum: 1
7572 #   }
7573 #   attr {
7574 #     name: "output_shapes"
7575 #     type: "list(shape)"
7576 #     has_minimum: true
7577 #     minimum: 1
7578 #   }
7579 #   is_stateful: true
7580 # }
7581 # op {
7582 #   name: "IteratorGetNextSync"
7583 #   input_arg {
7584 #     name: "iterator"
7585 #     type: DT_RESOURCE
7586 #   }
7587 #   output_arg {
7588 #     name: "components"
7589 #     type_list_attr: "output_types"
7590 #   }
7591 #   attr {
7592 #     name: "output_types"
7593 #     type: "list(type)"
7594 #     has_minimum: true
7595 #     minimum: 1
7596 #   }
7597 #   attr {
7598 #     name: "output_shapes"
7599 #     type: "list(shape)"
7600 #     has_minimum: true
7601 #     minimum: 1
7602 #   }
7603 #   is_stateful: true
7604 # }
7605 # op {
7606 #   name: "IteratorToStringHandle"
7607 #   input_arg {
7608 #     name: "resource_handle"
7609 #     type: DT_RESOURCE
7610 #   }
7611 #   output_arg {
7612 #     name: "string_handle"
7613 #     type: DT_STRING
7614 #   }
7615 #   is_stateful: true
7616 # }
7617 # op {
7618 #   name: "IteratorV2"
7619 #   output_arg {
7620 #     name: "handle"
7621 #     type: DT_RESOURCE
7622 #   }
7623 #   attr {
7624 #     name: "shared_name"
7625 #     type: "string"
7626 #   }
7627 #   attr {
7628 #     name: "container"
7629 #     type: "string"
7630 #   }
7631 #   attr {
7632 #     name: "output_types"
7633 #     type: "list(type)"
7634 #     has_minimum: true
7635 #     minimum: 1
7636 #   }
7637 #   attr {
7638 #     name: "output_shapes"
7639 #     type: "list(shape)"
7640 #     has_minimum: true
7641 #     minimum: 1
7642 #   }
7643 #   is_stateful: true
7644 # }
7645 # op {
7646 #   name: "LatencyStatsDataset"
7647 #   input_arg {
7648 #     name: "input_dataset"
7649 #     type: DT_VARIANT
7650 #   }
7651 #   input_arg {
7652 #     name: "tag"
7653 #     type: DT_STRING
7654 #   }
7655 #   output_arg {
7656 #     name: "handle"
7657 #     type: DT_VARIANT
7658 #   }
7659 #   attr {
7660 #     name: "output_types"
7661 #     type: "list(type)"
7662 #     has_minimum: true
7663 #     minimum: 1
7664 #   }
7665 #   attr {
7666 #     name: "output_shapes"
7667 #     type: "list(shape)"
7668 #     has_minimum: true
7669 #     minimum: 1
7670 #   }
7671 # }
7672 # op {
7673 #   name: "MakeIterator"
7674 #   input_arg {
7675 #     name: "dataset"
7676 #     type: DT_VARIANT
7677 #   }
7678 #   input_arg {
7679 #     name: "iterator"
7680 #     type: DT_RESOURCE
7681 #   }
7682 #   is_stateful: true
7683 # }
7684 # op {
7685 #   name: "MapAndBatchDataset"
7686 #   input_arg {
7687 #     name: "input_dataset"
7688 #     type: DT_VARIANT
7689 #   }
7690 #   input_arg {
7691 #     name: "other_arguments"
7692 #     type_list_attr: "Targuments"
7693 #   }
7694 #   input_arg {
7695 #     name: "batch_size"
7696 #     type: DT_INT64
7697 #   }
7698 #   input_arg {
7699 #     name: "num_parallel_batches"
7700 #     type: DT_INT64
7701 #   }
7702 #   input_arg {
7703 #     name: "drop_remainder"
7704 #     type: DT_BOOL
7705 #   }
7706 #   output_arg {
7707 #     name: "handle"
7708 #     type: DT_VARIANT
7709 #   }
7710 #   attr {
7711 #     name: "f"
7712 #     type: "func"
7713 #   }
7714 #   attr {
7715 #     name: "Targuments"
7716 #     type: "list(type)"
7717 #     has_minimum: true
7718 #   }
7719 #   attr {
7720 #     name: "output_types"
7721 #     type: "list(type)"
7722 #     has_minimum: true
7723 #     minimum: 1
7724 #   }
7725 #   attr {
7726 #     name: "output_shapes"
7727 #     type: "list(shape)"
7728 #     has_minimum: true
7729 #     minimum: 1
7730 #   }
7731 # }
7732 # op {
7733 #   name: "MapAndBatchDatasetV2"
7734 #   input_arg {
7735 #     name: "input_dataset"
7736 #     type: DT_VARIANT
7737 #   }
7738 #   input_arg {
7739 #     name: "other_arguments"
7740 #     type_list_attr: "Targuments"
7741 #   }
7742 #   input_arg {
7743 #     name: "batch_size"
7744 #     type: DT_INT64
7745 #   }
7746 #   input_arg {
7747 #     name: "num_parallel_calls"
7748 #     type: DT_INT64
7749 #   }
7750 #   input_arg {
7751 #     name: "drop_remainder"
7752 #     type: DT_BOOL
7753 #   }
7754 #   output_arg {
7755 #     name: "handle"
7756 #     type: DT_VARIANT
7757 #   }
7758 #   attr {
7759 #     name: "f"
7760 #     type: "func"
7761 #   }
7762 #   attr {
7763 #     name: "Targuments"
7764 #     type: "list(type)"
7765 #     has_minimum: true
7766 #   }
7767 #   attr {
7768 #     name: "output_types"
7769 #     type: "list(type)"
7770 #     has_minimum: true
7771 #     minimum: 1
7772 #   }
7773 #   attr {
7774 #     name: "output_shapes"
7775 #     type: "list(shape)"
7776 #     has_minimum: true
7777 #     minimum: 1
7778 #   }
7779 # }
7780 # op {
7781 #   name: "MapDataset"
7782 #   input_arg {
7783 #     name: "input_dataset"
7784 #     type: DT_VARIANT
7785 #   }
7786 #   input_arg {
7787 #     name: "other_arguments"
7788 #     type_list_attr: "Targuments"
7789 #   }
7790 #   output_arg {
7791 #     name: "handle"
7792 #     type: DT_VARIANT
7793 #   }
7794 #   attr {
7795 #     name: "f"
7796 #     type: "func"
7797 #   }
7798 #   attr {
7799 #     name: "Targuments"
7800 #     type: "list(type)"
7801 #     has_minimum: true
7802 #   }
7803 #   attr {
7804 #     name: "output_types"
7805 #     type: "list(type)"
7806 #     has_minimum: true
7807 #     minimum: 1
7808 #   }
7809 #   attr {
7810 #     name: "output_shapes"
7811 #     type: "list(shape)"
7812 #     has_minimum: true
7813 #     minimum: 1
7814 #   }
7815 #   attr {
7816 #     name: "use_inter_op_parallelism"
7817 #     type: "bool"
7818 #     default_value {
7819 #       b: true
7820 #     }
7821 #   }
7822 # }
7823 # op {
7824 #   name: "MapDefun"
7825 #   input_arg {
7826 #     name: "arguments"
7827 #     type_list_attr: "Targuments"
7828 #   }
7829 #   output_arg {
7830 #     name: "output"
7831 #     type_list_attr: "output_types"
7832 #   }
7833 #   attr {
7834 #     name: "Targuments"
7835 #     type: "list(type)"
7836 #     has_minimum: true
7837 #     minimum: 1
7838 #   }
7839 #   attr {
7840 #     name: "output_types"
7841 #     type: "list(type)"
7842 #     has_minimum: true
7843 #     minimum: 1
7844 #   }
7845 #   attr {
7846 #     name: "output_shapes"
7847 #     type: "list(shape)"
7848 #     has_minimum: true
7849 #     minimum: 1
7850 #   }
7851 #   attr {
7852 #     name: "f"
7853 #     type: "func"
7854 #   }
7855 # }
7856 # op {
7857 #   name: "ModelDataset"
7858 #   input_arg {
7859 #     name: "input_dataset"
7860 #     type: DT_VARIANT
7861 #   }
7862 #   output_arg {
7863 #     name: "handle"
7864 #     type: DT_VARIANT
7865 #   }
7866 #   attr {
7867 #     name: "output_types"
7868 #     type: "list(type)"
7869 #     has_minimum: true
7870 #     minimum: 1
7871 #   }
7872 #   attr {
7873 #     name: "output_shapes"
7874 #     type: "list(shape)"
7875 #     has_minimum: true
7876 #     minimum: 1
7877 #   }
7878 # }
7879 # op {
7880 #   name: "MultiDeviceIterator"
7881 #   output_arg {
7882 #     name: "handle"
7883 #     type: DT_RESOURCE
7884 #   }
7885 #   attr {
7886 #     name: "devices"
7887 #     type: "list(string)"
7888 #     has_minimum: true
7889 #     minimum: 1
7890 #   }
7891 #   attr {
7892 #     name: "shared_name"
7893 #     type: "string"
7894 #   }
7895 #   attr {
7896 #     name: "container"
7897 #     type: "string"
7898 #   }
7899 #   attr {
7900 #     name: "output_types"
7901 #     type: "list(type)"
7902 #     has_minimum: true
7903 #     minimum: 1
7904 #   }
7905 #   attr {
7906 #     name: "output_shapes"
7907 #     type: "list(shape)"
7908 #     has_minimum: true
7909 #     minimum: 1
7910 #   }
7911 #   is_stateful: true
7912 # }
7913 # op {
7914 #   name: "MultiDeviceIteratorFromStringHandle"
7915 #   input_arg {
7916 #     name: "string_handle"
7917 #     type: DT_STRING
7918 #   }
7919 #   output_arg {
7920 #     name: "multi_device_iterator"
7921 #     type: DT_RESOURCE
7922 #   }
7923 #   attr {
7924 #     name: "output_types"
7925 #     type: "list(type)"
7926 #     default_value {
7927 #       list {
7928 #       }
7929 #     }
7930 #     has_minimum: true
7931 #   }
7932 #   attr {
7933 #     name: "output_shapes"
7934 #     type: "list(shape)"
7935 #     default_value {
7936 #       list {
7937 #       }
7938 #     }
7939 #     has_minimum: true
7940 #   }
7941 #   is_stateful: true
7942 # }
7943 # op {
7944 #   name: "MultiDeviceIteratorGetNextFromShard"
7945 #   input_arg {
7946 #     name: "multi_device_iterator"
7947 #     type: DT_RESOURCE
7948 #   }
7949 #   input_arg {
7950 #     name: "shard_num"
7951 #     type: DT_INT32
7952 #   }
7953 #   input_arg {
7954 #     name: "incarnation_id"
7955 #     type: DT_INT64
7956 #   }
7957 #   output_arg {
7958 #     name: "components"
7959 #     type_list_attr: "output_types"
7960 #   }
7961 #   attr {
7962 #     name: "output_types"
7963 #     type: "list(type)"
7964 #     has_minimum: true
7965 #     minimum: 1
7966 #   }
7967 #   attr {
7968 #     name: "output_shapes"
7969 #     type: "list(shape)"
7970 #     has_minimum: true
7971 #     minimum: 1
7972 #   }
7973 #   is_stateful: true
7974 # }
7975 # op {
7976 #   name: "MultiDeviceIteratorInit"
7977 #   input_arg {
7978 #     name: "dataset"
7979 #     type: DT_VARIANT
7980 #   }
7981 #   input_arg {
7982 #     name: "multi_device_iterator"
7983 #     type: DT_RESOURCE
7984 #   }
7985 #   input_arg {
7986 #     name: "max_buffer_size"
7987 #     type: DT_INT64
7988 #   }
7989 #   output_arg {
7990 #     name: "incarnation_id"
7991 #     type: DT_INT64
7992 #   }
7993 #   is_stateful: true
7994 # }
7995 # op {
7996 #   name: "MultiDeviceIteratorToStringHandle"
7997 #   input_arg {
7998 #     name: "multi_device_iterator"
7999 #     type: DT_RESOURCE
8000 #   }
8001 #   output_arg {
8002 #     name: "string_handle"
8003 #     type: DT_STRING
8004 #   }
8005 #   is_stateful: true
8006 # }
8007 # op {
8008 #   name: "OneShotIterator"
8009 #   output_arg {
8010 #     name: "handle"
8011 #     type: DT_RESOURCE
8012 #   }
8013 #   attr {
8014 #     name: "dataset_factory"
8015 #     type: "func"
8016 #   }
8017 #   attr {
8018 #     name: "output_types"
8019 #     type: "list(type)"
8020 #     has_minimum: true
8021 #     minimum: 1
8022 #   }
8023 #   attr {
8024 #     name: "output_shapes"
8025 #     type: "list(shape)"
8026 #     has_minimum: true
8027 #     minimum: 1
8028 #   }
8029 #   attr {
8030 #     name: "container"
8031 #     type: "string"
8032 #     default_value {
8033 #       s: ""
8034 #     }
8035 #   }
8036 #   attr {
8037 #     name: "shared_name"
8038 #     type: "string"
8039 #     default_value {
8040 #       s: ""
8041 #     }
8042 #   }
8043 #   is_stateful: true
8044 # }
8045 # op {
8046 #   name: "OptimizeDataset"
8047 #   input_arg {
8048 #     name: "input_dataset"
8049 #     type: DT_VARIANT
8050 #   }
8051 #   input_arg {
8052 #     name: "optimizations"
8053 #     type: DT_STRING
8054 #   }
8055 #   output_arg {
8056 #     name: "handle"
8057 #     type: DT_VARIANT
8058 #   }
8059 #   attr {
8060 #     name: "output_types"
8061 #     type: "list(type)"
8062 #     has_minimum: true
8063 #     minimum: 1
8064 #   }
8065 #   attr {
8066 #     name: "output_shapes"
8067 #     type: "list(shape)"
8068 #     has_minimum: true
8069 #     minimum: 1
8070 #   }
8071 # }
8072 # op {
8073 #   name: "OptionalFromValue"
8074 #   input_arg {
8075 #     name: "components"
8076 #     type_list_attr: "Toutput_types"
8077 #   }
8078 #   output_arg {
8079 #     name: "optional"
8080 #     type: DT_VARIANT
8081 #   }
8082 #   attr {
8083 #     name: "Toutput_types"
8084 #     type: "list(type)"
8085 #     has_minimum: true
8086 #     minimum: 1
8087 #   }
8088 # }
8089 # op {
8090 #   name: "OptionalGetValue"
8091 #   input_arg {
8092 #     name: "optional"
8093 #     type: DT_VARIANT
8094 #   }
8095 #   output_arg {
8096 #     name: "components"
8097 #     type_list_attr: "output_types"
8098 #   }
8099 #   attr {
8100 #     name: "output_types"
8101 #     type: "list(type)"
8102 #     has_minimum: true
8103 #     minimum: 1
8104 #   }
8105 #   attr {
8106 #     name: "output_shapes"
8107 #     type: "list(shape)"
8108 #     has_minimum: true
8109 #     minimum: 1
8110 #   }
8111 # }
8112 # op {
8113 #   name: "OptionalHasValue"
8114 #   input_arg {
8115 #     name: "optional"
8116 #     type: DT_VARIANT
8117 #   }
8118 #   output_arg {
8119 #     name: "has_value"
8120 #     type: DT_BOOL
8121 #   }
8122 # }
8123 # op {
8124 #   name: "OptionalNone"
8125 #   output_arg {
8126 #     name: "optional"
8127 #     type: DT_VARIANT
8128 #   }
8129 # }
8130 # op {
8131 #   name: "PaddedBatchDataset"
8132 #   input_arg {
8133 #     name: "input_dataset"
8134 #     type: DT_VARIANT
8135 #   }
8136 #   input_arg {
8137 #     name: "batch_size"
8138 #     type: DT_INT64
8139 #   }
8140 #   input_arg {
8141 #     name: "padded_shapes"
8142 #     type: DT_INT64
8143 #     number_attr: "N"
8144 #   }
8145 #   input_arg {
8146 #     name: "padding_values"
8147 #     type_list_attr: "Toutput_types"
8148 #   }
8149 #   output_arg {
8150 #     name: "handle"
8151 #     type: DT_VARIANT
8152 #   }
8153 #   attr {
8154 #     name: "Toutput_types"
8155 #     type: "list(type)"
8156 #     has_minimum: true
8157 #     minimum: 1
8158 #   }
8159 #   attr {
8160 #     name: "output_shapes"
8161 #     type: "list(shape)"
8162 #     has_minimum: true
8163 #     minimum: 1
8164 #   }
8165 #   attr {
8166 #     name: "N"
8167 #     type: "int"
8168 #     has_minimum: true
8169 #     minimum: 1
8170 #   }
8171 # }
8172 # op {
8173 #   name: "PaddedBatchDatasetV2"
8174 #   input_arg {
8175 #     name: "input_dataset"
8176 #     type: DT_VARIANT
8177 #   }
8178 #   input_arg {
8179 #     name: "batch_size"
8180 #     type: DT_INT64
8181 #   }
8182 #   input_arg {
8183 #     name: "padded_shapes"
8184 #     type: DT_INT64
8185 #     number_attr: "N"
8186 #   }
8187 #   input_arg {
8188 #     name: "padding_values"
8189 #     type_list_attr: "Toutput_types"
8190 #   }
8191 #   input_arg {
8192 #     name: "drop_remainder"
8193 #     type: DT_BOOL
8194 #   }
8195 #   output_arg {
8196 #     name: "handle"
8197 #     type: DT_VARIANT
8198 #   }
8199 #   attr {
8200 #     name: "Toutput_types"
8201 #     type: "list(type)"
8202 #     has_minimum: true
8203 #     minimum: 1
8204 #   }
8205 #   attr {
8206 #     name: "output_shapes"
8207 #     type: "list(shape)"
8208 #     has_minimum: true
8209 #     minimum: 1
8210 #   }
8211 #   attr {
8212 #     name: "N"
8213 #     type: "int"
8214 #     has_minimum: true
8215 #     minimum: 1
8216 #   }
8217 # }
8218 # op {
8219 #   name: "ParallelInterleaveDataset"
8220 #   input_arg {
8221 #     name: "input_dataset"
8222 #     type: DT_VARIANT
8223 #   }
8224 #   input_arg {
8225 #     name: "other_arguments"
8226 #     type_list_attr: "Targuments"
8227 #   }
8228 #   input_arg {
8229 #     name: "cycle_length"
8230 #     type: DT_INT64
8231 #   }
8232 #   input_arg {
8233 #     name: "block_length"
8234 #     type: DT_INT64
8235 #   }
8236 #   input_arg {
8237 #     name: "sloppy"
8238 #     type: DT_BOOL
8239 #   }
8240 #   input_arg {
8241 #     name: "buffer_output_elements"
8242 #     type: DT_INT64
8243 #   }
8244 #   input_arg {
8245 #     name: "prefetch_input_elements"
8246 #     type: DT_INT64
8247 #   }
8248 #   output_arg {
8249 #     name: "handle"
8250 #     type: DT_VARIANT
8251 #   }
8252 #   attr {
8253 #     name: "f"
8254 #     type: "func"
8255 #   }
8256 #   attr {
8257 #     name: "Targuments"
8258 #     type: "list(type)"
8259 #     has_minimum: true
8260 #   }
8261 #   attr {
8262 #     name: "output_types"
8263 #     type: "list(type)"
8264 #     has_minimum: true
8265 #     minimum: 1
8266 #   }
8267 #   attr {
8268 #     name: "output_shapes"
8269 #     type: "list(shape)"
8270 #     has_minimum: true
8271 #     minimum: 1
8272 #   }
8273 # }
8274 # op {
8275 #   name: "ParallelInterleaveDatasetV2"
8276 #   input_arg {
8277 #     name: "input_dataset"
8278 #     type: DT_VARIANT
8279 #   }
8280 #   input_arg {
8281 #     name: "other_arguments"
8282 #     type_list_attr: "Targuments"
8283 #   }
8284 #   input_arg {
8285 #     name: "cycle_length"
8286 #     type: DT_INT64
8287 #   }
8288 #   input_arg {
8289 #     name: "block_length"
8290 #     type: DT_INT64
8291 #   }
8292 #   input_arg {
8293 #     name: "num_parallel_calls"
8294 #     type: DT_INT64
8295 #   }
8296 #   output_arg {
8297 #     name: "handle"
8298 #     type: DT_VARIANT
8299 #   }
8300 #   attr {
8301 #     name: "f"
8302 #     type: "func"
8303 #   }
8304 #   attr {
8305 #     name: "Targuments"
8306 #     type: "list(type)"
8307 #     has_minimum: true
8308 #   }
8309 #   attr {
8310 #     name: "output_types"
8311 #     type: "list(type)"
8312 #     has_minimum: true
8313 #     minimum: 1
8314 #   }
8315 #   attr {
8316 #     name: "output_shapes"
8317 #     type: "list(shape)"
8318 #     has_minimum: true
8319 #     minimum: 1
8320 #   }
8321 # }
8322 # op {
8323 #   name: "ParallelMapDataset"
8324 #   input_arg {
8325 #     name: "input_dataset"
8326 #     type: DT_VARIANT
8327 #   }
8328 #   input_arg {
8329 #     name: "other_arguments"
8330 #     type_list_attr: "Targuments"
8331 #   }
8332 #   input_arg {
8333 #     name: "num_parallel_calls"
8334 #     type: DT_INT32
8335 #   }
8336 #   output_arg {
8337 #     name: "handle"
8338 #     type: DT_VARIANT
8339 #   }
8340 #   attr {
8341 #     name: "f"
8342 #     type: "func"
8343 #   }
8344 #   attr {
8345 #     name: "Targuments"
8346 #     type: "list(type)"
8347 #     has_minimum: true
8348 #   }
8349 #   attr {
8350 #     name: "output_types"
8351 #     type: "list(type)"
8352 #     has_minimum: true
8353 #     minimum: 1
8354 #   }
8355 #   attr {
8356 #     name: "output_shapes"
8357 #     type: "list(shape)"
8358 #     has_minimum: true
8359 #     minimum: 1
8360 #   }
8361 #   attr {
8362 #     name: "use_inter_op_parallelism"
8363 #     type: "bool"
8364 #     default_value {
8365 #       b: true
8366 #     }
8367 #   }
8368 # }
8369 # op {
8370 #   name: "ParseExampleDataset"
8371 #   input_arg {
8372 #     name: "input_dataset"
8373 #     type: DT_VARIANT
8374 #   }
8375 #   input_arg {
8376 #     name: "num_parallel_calls"
8377 #     type: DT_INT64
8378 #   }
8379 #   input_arg {
8380 #     name: "dense_defaults"
8381 #     type_list_attr: "Tdense"
8382 #   }
8383 #   output_arg {
8384 #     name: "handle"
8385 #     type: DT_VARIANT
8386 #   }
8387 #   attr {
8388 #     name: "sparse_keys"
8389 #     type: "list(string)"
8390 #     has_minimum: true
8391 #   }
8392 #   attr {
8393 #     name: "dense_keys"
8394 #     type: "list(string)"
8395 #     has_minimum: true
8396 #   }
8397 #   attr {
8398 #     name: "sparse_types"
8399 #     type: "list(type)"
8400 #     has_minimum: true
8401 #     allowed_values {
8402 #       list {
8403 #         type: DT_FLOAT
8404 #         type: DT_INT64
8405 #         type: DT_STRING
8406 #       }
8407 #     }
8408 #   }
8409 #   attr {
8410 #     name: "Tdense"
8411 #     type: "list(type)"
8412 #     has_minimum: true
8413 #     allowed_values {
8414 #       list {
8415 #         type: DT_FLOAT
8416 #         type: DT_INT64
8417 #         type: DT_STRING
8418 #       }
8419 #     }
8420 #   }
8421 #   attr {
8422 #     name: "dense_shapes"
8423 #     type: "list(shape)"
8424 #     has_minimum: true
8425 #   }
8426 #   attr {
8427 #     name: "output_types"
8428 #     type: "list(type)"
8429 #     has_minimum: true
8430 #     minimum: 1
8431 #   }
8432 #   attr {
8433 #     name: "output_shapes"
8434 #     type: "list(shape)"
8435 #     has_minimum: true
8436 #     minimum: 1
8437 #   }
8438 # }
8439 # op {
8440 #   name: "PrefetchDataset"
8441 #   input_arg {
8442 #     name: "input_dataset"
8443 #     type: DT_VARIANT
8444 #   }
8445 #   input_arg {
8446 #     name: "buffer_size"
8447 #     type: DT_INT64
8448 #   }
8449 #   output_arg {
8450 #     name: "handle"
8451 #     type: DT_VARIANT
8452 #   }
8453 #   attr {
8454 #     name: "output_types"
8455 #     type: "list(type)"
8456 #     has_minimum: true
8457 #     minimum: 1
8458 #   }
8459 #   attr {
8460 #     name: "output_shapes"
8461 #     type: "list(shape)"
8462 #     has_minimum: true
8463 #     minimum: 1
8464 #   }
8465 # }
8466 # op {
8467 #   name: "PrependFromQueueAndPaddedBatchDataset"
8468 #   input_arg {
8469 #     name: "input_dataset"
8470 #     type: DT_VARIANT
8471 #   }
8472 #   input_arg {
8473 #     name: "batch_size"
8474 #     type: DT_INT64
8475 #   }
8476 #   input_arg {
8477 #     name: "padded_shapes"
8478 #     type: DT_INT64
8479 #     number_attr: "N"
8480 #   }
8481 #   input_arg {
8482 #     name: "padding_values"
8483 #     type_list_attr: "Toutput_types"
8484 #   }
8485 #   output_arg {
8486 #     name: "handle"
8487 #     type: DT_VARIANT
8488 #   }
8489 #   attr {
8490 #     name: "Toutput_types"
8491 #     type: "list(type)"
8492 #     has_minimum: true
8493 #     minimum: 1
8494 #   }
8495 #   attr {
8496 #     name: "output_shapes"
8497 #     type: "list(shape)"
8498 #     has_minimum: true
8499 #     minimum: 1
8500 #   }
8501 #   attr {
8502 #     name: "N"
8503 #     type: "int"
8504 #     has_minimum: true
8505 #     minimum: 1
8506 #   }
8507 # }
8508 # op {
8509 #   name: "RandomDataset"
8510 #   input_arg {
8511 #     name: "seed"
8512 #     type: DT_INT64
8513 #   }
8514 #   input_arg {
8515 #     name: "seed2"
8516 #     type: DT_INT64
8517 #   }
8518 #   output_arg {
8519 #     name: "handle"
8520 #     type: DT_VARIANT
8521 #   }
8522 #   attr {
8523 #     name: "output_types"
8524 #     type: "list(type)"
8525 #     has_minimum: true
8526 #     minimum: 1
8527 #   }
8528 #   attr {
8529 #     name: "output_shapes"
8530 #     type: "list(shape)"
8531 #     has_minimum: true
8532 #     minimum: 1
8533 #   }
8534 #   is_stateful: true
8535 # }
8536 # op {
8537 #   name: "RangeDataset"
8538 #   input_arg {
8539 #     name: "start"
8540 #     type: DT_INT64
8541 #   }
8542 #   input_arg {
8543 #     name: "stop"
8544 #     type: DT_INT64
8545 #   }
8546 #   input_arg {
8547 #     name: "step"
8548 #     type: DT_INT64
8549 #   }
8550 #   output_arg {
8551 #     name: "handle"
8552 #     type: DT_VARIANT
8553 #   }
8554 #   attr {
8555 #     name: "output_types"
8556 #     type: "list(type)"
8557 #     has_minimum: true
8558 #     minimum: 1
8559 #   }
8560 #   attr {
8561 #     name: "output_shapes"
8562 #     type: "list(shape)"
8563 #     has_minimum: true
8564 #     minimum: 1
8565 #   }
8566 #   is_stateful: true
8567 # }
8568 # op {
8569 #   name: "ReduceDataset"
8570 #   input_arg {
8571 #     name: "input_dataset"
8572 #     type: DT_VARIANT
8573 #   }
8574 #   input_arg {
8575 #     name: "initial_state"
8576 #     type_list_attr: "Tstate"
8577 #   }
8578 #   input_arg {
8579 #     name: "other_arguments"
8580 #     type_list_attr: "Targuments"
8581 #   }
8582 #   output_arg {
8583 #     name: "components"
8584 #     type_list_attr: "output_types"
8585 #   }
8586 #   attr {
8587 #     name: "f"
8588 #     type: "func"
8589 #   }
8590 #   attr {
8591 #     name: "Tstate"
8592 #     type: "list(type)"
8593 #     has_minimum: true
8594 #     minimum: 1
8595 #   }
8596 #   attr {
8597 #     name: "Targuments"
8598 #     type: "list(type)"
8599 #     has_minimum: true
8600 #   }
8601 #   attr {
8602 #     name: "output_types"
8603 #     type: "list(type)"
8604 #     has_minimum: true
8605 #     minimum: 1
8606 #   }
8607 #   attr {
8608 #     name: "output_shapes"
8609 #     type: "list(shape)"
8610 #     has_minimum: true
8611 #     minimum: 1
8612 #   }
8613 #   attr {
8614 #     name: "use_inter_op_parallelism"
8615 #     type: "bool"
8616 #     default_value {
8617 #       b: true
8618 #     }
8619 #   }
8620 # }
8621 # op {
8622 #   name: "RepeatDataset"
8623 #   input_arg {
8624 #     name: "input_dataset"
8625 #     type: DT_VARIANT
8626 #   }
8627 #   input_arg {
8628 #     name: "count"
8629 #     type: DT_INT64
8630 #   }
8631 #   output_arg {
8632 #     name: "handle"
8633 #     type: DT_VARIANT
8634 #   }
8635 #   attr {
8636 #     name: "output_types"
8637 #     type: "list(type)"
8638 #     has_minimum: true
8639 #     minimum: 1
8640 #   }
8641 #   attr {
8642 #     name: "output_shapes"
8643 #     type: "list(shape)"
8644 #     has_minimum: true
8645 #     minimum: 1
8646 #   }
8647 # }
8648 # op {
8649 #   name: "ScanDataset"
8650 #   input_arg {
8651 #     name: "input_dataset"
8652 #     type: DT_VARIANT
8653 #   }
8654 #   input_arg {
8655 #     name: "initial_state"
8656 #     type_list_attr: "Tstate"
8657 #   }
8658 #   input_arg {
8659 #     name: "other_arguments"
8660 #     type_list_attr: "Targuments"
8661 #   }
8662 #   output_arg {
8663 #     name: "handle"
8664 #     type: DT_VARIANT
8665 #   }
8666 #   attr {
8667 #     name: "f"
8668 #     type: "func"
8669 #   }
8670 #   attr {
8671 #     name: "Tstate"
8672 #     type: "list(type)"
8673 #     has_minimum: true
8674 #     minimum: 1
8675 #   }
8676 #   attr {
8677 #     name: "Targuments"
8678 #     type: "list(type)"
8679 #     has_minimum: true
8680 #   }
8681 #   attr {
8682 #     name: "output_types"
8683 #     type: "list(type)"
8684 #     has_minimum: true
8685 #     minimum: 1
8686 #   }
8687 #   attr {
8688 #     name: "output_shapes"
8689 #     type: "list(shape)"
8690 #     has_minimum: true
8691 #     minimum: 1
8692 #   }
8693 # }
8694 # op {
8695 #   name: "SerializeIterator"
8696 #   input_arg {
8697 #     name: "resource_handle"
8698 #     type: DT_RESOURCE
8699 #   }
8700 #   output_arg {
8701 #     name: "serialized"
8702 #     type: DT_VARIANT
8703 #   }
8704 #   is_stateful: true
8705 # }
8706 # op {
8707 #   name: "SetStatsAggregatorDataset"
8708 #   input_arg {
8709 #     name: "input_dataset"
8710 #     type: DT_VARIANT
8711 #   }
8712 #   input_arg {
8713 #     name: "stats_aggregator"
8714 #     type: DT_RESOURCE
8715 #   }
8716 #   output_arg {
8717 #     name: "handle"
8718 #     type: DT_VARIANT
8719 #   }
8720 #   attr {
8721 #     name: "output_types"
8722 #     type: "list(type)"
8723 #     has_minimum: true
8724 #     minimum: 1
8725 #   }
8726 #   attr {
8727 #     name: "output_shapes"
8728 #     type: "list(shape)"
8729 #     has_minimum: true
8730 #     minimum: 1
8731 #   }
8732 #   is_stateful: true
8733 # }
8734 # op {
8735 #   name: "ShuffleAndRepeatDataset"
8736 #   input_arg {
8737 #     name: "input_dataset"
8738 #     type: DT_VARIANT
8739 #   }
8740 #   input_arg {
8741 #     name: "buffer_size"
8742 #     type: DT_INT64
8743 #   }
8744 #   input_arg {
8745 #     name: "seed"
8746 #     type: DT_INT64
8747 #   }
8748 #   input_arg {
8749 #     name: "seed2"
8750 #     type: DT_INT64
8751 #   }
8752 #   input_arg {
8753 #     name: "count"
8754 #     type: DT_INT64
8755 #   }
8756 #   output_arg {
8757 #     name: "handle"
8758 #     type: DT_VARIANT
8759 #   }
8760 #   attr {
8761 #     name: "output_types"
8762 #     type: "list(type)"
8763 #     has_minimum: true
8764 #     minimum: 1
8765 #   }
8766 #   attr {
8767 #     name: "output_shapes"
8768 #     type: "list(shape)"
8769 #     has_minimum: true
8770 #     minimum: 1
8771 #   }
8772 # }
8773 # op {
8774 #   name: "ShuffleDataset"
8775 #   input_arg {
8776 #     name: "input_dataset"
8777 #     type: DT_VARIANT
8778 #   }
8779 #   input_arg {
8780 #     name: "buffer_size"
8781 #     type: DT_INT64
8782 #   }
8783 #   input_arg {
8784 #     name: "seed"
8785 #     type: DT_INT64
8786 #   }
8787 #   input_arg {
8788 #     name: "seed2"
8789 #     type: DT_INT64
8790 #   }
8791 #   output_arg {
8792 #     name: "handle"
8793 #     type: DT_VARIANT
8794 #   }
8795 #   attr {
8796 #     name: "reshuffle_each_iteration"
8797 #     type: "bool"
8798 #     default_value {
8799 #       b: true
8800 #     }
8801 #   }
8802 #   attr {
8803 #     name: "output_types"
8804 #     type: "list(type)"
8805 #     has_minimum: true
8806 #     minimum: 1
8807 #   }
8808 #   attr {
8809 #     name: "output_shapes"
8810 #     type: "list(shape)"
8811 #     has_minimum: true
8812 #     minimum: 1
8813 #   }
8814 # }
8815 # op {
8816 #   name: "SinkDataset"
8817 #   input_arg {
8818 #     name: "input_dataset"
8819 #     type: DT_VARIANT
8820 #   }
8821 #   output_arg {
8822 #     name: "handle"
8823 #     type: DT_VARIANT
8824 #   }
8825 # }
8826 # op {
8827 #   name: "SkipDataset"
8828 #   input_arg {
8829 #     name: "input_dataset"
8830 #     type: DT_VARIANT
8831 #   }
8832 #   input_arg {
8833 #     name: "count"
8834 #     type: DT_INT64
8835 #   }
8836 #   output_arg {
8837 #     name: "handle"
8838 #     type: DT_VARIANT
8839 #   }
8840 #   attr {
8841 #     name: "output_types"
8842 #     type: "list(type)"
8843 #     has_minimum: true
8844 #     minimum: 1
8845 #   }
8846 #   attr {
8847 #     name: "output_shapes"
8848 #     type: "list(shape)"
8849 #     has_minimum: true
8850 #     minimum: 1
8851 #   }
8852 # }
8853 # op {
8854 #   name: "SlideDataset"
8855 #   input_arg {
8856 #     name: "input_dataset"
8857 #     type: DT_VARIANT
8858 #   }
8859 #   input_arg {
8860 #     name: "window_size"
8861 #     type: DT_INT64
8862 #   }
8863 #   input_arg {
8864 #     name: "window_shift"
8865 #     type: DT_INT64
8866 #   }
8867 #   input_arg {
8868 #     name: "window_stride"
8869 #     type: DT_INT64
8870 #   }
8871 #   output_arg {
8872 #     name: "handle"
8873 #     type: DT_VARIANT
8874 #   }
8875 #   attr {
8876 #     name: "output_types"
8877 #     type: "list(type)"
8878 #     has_minimum: true
8879 #     minimum: 1
8880 #   }
8881 #   attr {
8882 #     name: "output_shapes"
8883 #     type: "list(shape)"
8884 #     has_minimum: true
8885 #     minimum: 1
8886 #   }
8887 # }
8888 # op {
8889 #   name: "SparseTensorSliceDataset"
8890 #   input_arg {
8891 #     name: "indices"
8892 #     type: DT_INT64
8893 #   }
8894 #   input_arg {
8895 #     name: "values"
8896 #     type_attr: "Tvalues"
8897 #   }
8898 #   input_arg {
8899 #     name: "dense_shape"
8900 #     type: DT_INT64
8901 #   }
8902 #   output_arg {
8903 #     name: "handle"
8904 #     type: DT_VARIANT
8905 #   }
8906 #   attr {
8907 #     name: "Tvalues"
8908 #     type: "type"
8909 #   }
8910 #   is_stateful: true
8911 # }
8912 # op {
8913 #   name: "SqlDataset"
8914 #   input_arg {
8915 #     name: "driver_name"
8916 #     type: DT_STRING
8917 #   }
8918 #   input_arg {
8919 #     name: "data_source_name"
8920 #     type: DT_STRING
8921 #   }
8922 #   input_arg {
8923 #     name: "query"
8924 #     type: DT_STRING
8925 #   }
8926 #   output_arg {
8927 #     name: "handle"
8928 #     type: DT_VARIANT
8929 #   }
8930 #   attr {
8931 #     name: "output_types"
8932 #     type: "list(type)"
8933 #     has_minimum: true
8934 #     minimum: 1
8935 #   }
8936 #   attr {
8937 #     name: "output_shapes"
8938 #     type: "list(shape)"
8939 #     has_minimum: true
8940 #     minimum: 1
8941 #   }
8942 #   is_stateful: true
8943 # }
8944 # op {
8945 #   name: "StatsAggregatorHandle"
8946 #   output_arg {
8947 #     name: "handle"
8948 #     type: DT_RESOURCE
8949 #   }
8950 #   attr {
8951 #     name: "container"
8952 #     type: "string"
8953 #     default_value {
8954 #       s: ""
8955 #     }
8956 #   }
8957 #   attr {
8958 #     name: "shared_name"
8959 #     type: "string"
8960 #     default_value {
8961 #       s: ""
8962 #     }
8963 #   }
8964 #   is_stateful: true
8965 # }
8966 # op {
8967 #   name: "StatsAggregatorSummary"
8968 #   input_arg {
8969 #     name: "iterator"
8970 #     type: DT_RESOURCE
8971 #   }
8972 #   output_arg {
8973 #     name: "summary"
8974 #     type: DT_STRING
8975 #   }
8976 #   is_stateful: true
8977 # }
8978 # op {
8979 #   name: "TFRecordDataset"
8980 #   input_arg {
8981 #     name: "filenames"
8982 #     type: DT_STRING
8983 #   }
8984 #   input_arg {
8985 #     name: "compression_type"
8986 #     type: DT_STRING
8987 #   }
8988 #   input_arg {
8989 #     name: "buffer_size"
8990 #     type: DT_INT64
8991 #   }
8992 #   output_arg {
8993 #     name: "handle"
8994 #     type: DT_VARIANT
8995 #   }
8996 #   is_stateful: true
8997 # }
8998 # op {
8999 #   name: "TakeDataset"
9000 #   input_arg {
9001 #     name: "input_dataset"
9002 #     type: DT_VARIANT
9003 #   }
9004 #   input_arg {
9005 #     name: "count"
9006 #     type: DT_INT64
9007 #   }
9008 #   output_arg {
9009 #     name: "handle"
9010 #     type: DT_VARIANT
9011 #   }
9012 #   attr {
9013 #     name: "output_types"
9014 #     type: "list(type)"
9015 #     has_minimum: true
9016 #     minimum: 1
9017 #   }
9018 #   attr {
9019 #     name: "output_shapes"
9020 #     type: "list(shape)"
9021 #     has_minimum: true
9022 #     minimum: 1
9023 #   }
9024 # }
9025 # op {
9026 #   name: "TensorDataset"
9027 #   input_arg {
9028 #     name: "components"
9029 #     type_list_attr: "Toutput_types"
9030 #   }
9031 #   output_arg {
9032 #     name: "handle"
9033 #     type: DT_VARIANT
9034 #   }
9035 #   attr {
9036 #     name: "Toutput_types"
9037 #     type: "list(type)"
9038 #     has_minimum: true
9039 #     minimum: 1
9040 #   }
9041 #   attr {
9042 #     name: "output_shapes"
9043 #     type: "list(shape)"
9044 #     has_minimum: true
9045 #     minimum: 1
9046 #   }
9047 #   is_stateful: true
9048 # }
9049 # op {
9050 #   name: "TensorSliceDataset"
9051 #   input_arg {
9052 #     name: "components"
9053 #     type_list_attr: "Toutput_types"
9054 #   }
9055 #   output_arg {
9056 #     name: "handle"
9057 #     type: DT_VARIANT
9058 #   }
9059 #   attr {
9060 #     name: "Toutput_types"
9061 #     type: "list(type)"
9062 #     has_minimum: true
9063 #     minimum: 1
9064 #   }
9065 #   attr {
9066 #     name: "output_shapes"
9067 #     type: "list(shape)"
9068 #     has_minimum: true
9069 #     minimum: 1
9070 #   }
9071 #   is_stateful: true
9072 # }
9073 # op {
9074 #   name: "TextLineDataset"
9075 #   input_arg {
9076 #     name: "filenames"
9077 #     type: DT_STRING
9078 #   }
9079 #   input_arg {
9080 #     name: "compression_type"
9081 #     type: DT_STRING
9082 #   }
9083 #   input_arg {
9084 #     name: "buffer_size"
9085 #     type: DT_INT64
9086 #   }
9087 #   output_arg {
9088 #     name: "handle"
9089 #     type: DT_VARIANT
9090 #   }
9091 #   is_stateful: true
9092 # }
9093 # op {
9094 #   name: "UnbatchDataset"
9095 #   input_arg {
9096 #     name: "input_dataset"
9097 #     type: DT_VARIANT
9098 #   }
9099 #   output_arg {
9100 #     name: "handle"
9101 #     type: DT_VARIANT
9102 #   }
9103 #   attr {
9104 #     name: "output_types"
9105 #     type: "list(type)"
9106 #     has_minimum: true
9107 #     minimum: 1
9108 #   }
9109 #   attr {
9110 #     name: "output_shapes"
9111 #     type: "list(shape)"
9112 #     has_minimum: true
9113 #     minimum: 1
9114 #   }
9115 # }
9116 # op {
9117 #   name: "WindowDataset"
9118 #   input_arg {
9119 #     name: "input_dataset"
9120 #     type: DT_VARIANT
9121 #   }
9122 #   input_arg {
9123 #     name: "size"
9124 #     type: DT_INT64
9125 #   }
9126 #   input_arg {
9127 #     name: "shift"
9128 #     type: DT_INT64
9129 #   }
9130 #   input_arg {
9131 #     name: "stride"
9132 #     type: DT_INT64
9133 #   }
9134 #   input_arg {
9135 #     name: "drop_remainder"
9136 #     type: DT_BOOL
9137 #   }
9138 #   output_arg {
9139 #     name: "handle"
9140 #     type: DT_VARIANT
9141 #   }
9142 #   attr {
9143 #     name: "output_types"
9144 #     type: "list(type)"
9145 #     has_minimum: true
9146 #     minimum: 1
9147 #   }
9148 #   attr {
9149 #     name: "output_shapes"
9150 #     type: "list(shape)"
9151 #     has_minimum: true
9152 #     minimum: 1
9153 #   }
9154 # }
9155 # op {
9156 #   name: "ZipDataset"
9157 #   input_arg {
9158 #     name: "input_datasets"
9159 #     type: DT_VARIANT
9160 #     number_attr: "N"
9161 #   }
9162 #   output_arg {
9163 #     name: "handle"
9164 #     type: DT_VARIANT
9165 #   }
9166 #   attr {
9167 #     name: "output_types"
9168 #     type: "list(type)"
9169 #     has_minimum: true
9170 #     minimum: 1
9171 #   }
9172 #   attr {
9173 #     name: "output_shapes"
9174 #     type: "list(shape)"
9175 #     has_minimum: true
9176 #     minimum: 1
9177 #   }
9178 #   attr {
9179 #     name: "N"
9180 #     type: "int"
9181 #     has_minimum: true
9182 #     minimum: 1
9183 #   }
9184 # }
9185 _op_def_lib = _InitOpDefLibrary(b"\nd\n\021AnonymousIterator\032\n\n\006handle\030\024\"\036\n\014output_types\022\nlist(type)(\0010\001\" \n\routput_shapes\022\013list(shape)(\0010\001\210\001\001\n\177\n\014BatchDataset\022\021\n\rinput_dataset\030\025\022\016\n\nbatch_size\030\t\032\n\n\006handle\030\025\"\036\n\014output_types\022\nlist(type)(\0010\001\" \n\routput_shapes\022\013list(shape)(\0010\001\n\225\001\n\016BatchDatasetV2\022\021\n\rinput_dataset\030\025\022\016\n\nbatch_size\030\t\022\022\n\016drop_remainder\030\n\032\n\n\006handle\030\025\"\036\n\014output_types\022\nlist(type)(\0010\001\" \n\routput_shapes\022\013list(shape)(\0010\001\n\205\001\n\031BytesProducedStatsDataset\022\021\n\rinput_dataset\030\025\022\007\n\003tag\030\007\032\n\n\006handle\030\025\"\036\n\014output_types\022\nlist(type)(\0010\001\" \n\routput_shapes\022\013list(shape)(\0010\001\n}\n\014CacheDataset\022\021\n\rinput_dataset\030\025\022\014\n\010filename\030\007\032\n\n\006handle\030\025\"\036\n\014output_types\022\nlist(type)(\0010\001\" \n\routput_shapes\022\013list(shape)(\0010\001\n\212\001\n\022ConcatenateDataset\022\021\n\rinput_dataset\030\025\022\023\n\017another_dataset\030\025\032\n\n\006handle\030\025\"\036\n\014output_types\022\nlist(type)(\0010\001\" \n\routput_shapes\022\013list(shape)(\0010\001\n.\n\016DatasetToGraph\022\021\n\rinput_dataset\030\025\032\t\n\005graph\030\007\n\203\001\n\026DatasetToSingleElement\022\013\n\007dataset\030\025\032\032\n\ncomponents2\014output_types\"\036\n\014output_types\022\nlist(type)(\0010\001\" \n\routput_shapes\022\013list(shape)(\0010\001\nJ\n\021DatasetToTFRecord\022\021\n\rinput_dataset\030\025\022\014\n\010filename\030\007\022\024\n\020compression_type\030\007\n\233\001\n\031DenseToSparseBatchDataset\022\021\n\rinput_dataset\030\025\022\016\n\nbatch_size\030\t\022\r\n\trow_shape\030\t\032\n\n\006handle\030\025\"\036\n\014output_types\022\nlist(type)(\0010\001\" 
\n\routput_shapes\022\013list(shape)(\0010\001\n=\n\023DeserializeIterator\022\023\n\017resource_handle\030\024\022\016\n\nserialized\030\025\210\001\001\n_\n\025EnqueueInQueueDataset\022\t\n\005queue\030\025\022\031\n\ncomponents2\013Tcomponents\"\035\n\013Tcomponents\022\nlist(type)(\0010\001\210\001\001\n\177\n\034FilterByLastComponentDataset\022\021\n\rinput_dataset\030\025\032\n\n\006output\030\025\"\036\n\014output_types\022\nlist(type)(\0010\001\" \n\routput_shapes\022\013list(shape)(\0010\001\n\276\001\n\rFilterDataset\022\021\n\rinput_dataset\030\025\022\035\n\017other_arguments2\nTarguments\032\n\n\006handle\030\025\"\021\n\tpredicate\022\004func\"\032\n\nTarguments\022\nlist(type)(\001\"\036\n\014output_types\022\nlist(type)(\0010\001\" \n\routput_shapes\022\013list(shape)(\0010\001\n\177\n\030FixedLengthRecordDataset\022\r\n\tfilenames\030\007\022\020\n\014header_bytes\030\t\022\020\n\014record_bytes\030\t\022\020\n\014footer_bytes\030\t\022\017\n\013buffer_size\030\t\032\n\n\006handle\030\025\210\001\001\n\267\001\n\016FlatMapDataset\022\021\n\rinput_dataset\030\025\022\035\n\017other_arguments2\nTarguments\032\n\n\006handle\030\025\"\t\n\001f\022\004func\"\032\n\nTarguments\022\nlist(type)(\001\"\036\n\014output_types\022\nlist(type)(\0010\001\" \n\routput_shapes\022\013list(shape)(\0010\001\n\212\003\n\020GeneratorDataset\022\'\n\024init_func_other_args2\017Tinit_func_args\022\'\n\024next_func_other_args2\017Tnext_func_args\022/\n\030finalize_func_other_args2\023Tfinalize_func_args\032\n\n\006handle\030\025\"\021\n\tinit_func\022\004func\"\021\n\tnext_func\022\004func\"\025\n\rfinalize_func\022\004func\"\037\n\017Tinit_func_args\022\nlist(type)(\001\"\037\n\017Tnext_func_args\022\nlist(type)(\001\"#\n\023Tfinalize_func_args\022\nlist(type)(\001\"\036\n\014output_types\022\nlist(type)(\0010\001\" 
\n\routput_shapes\022\013list(shape)(\0010\001\210\001\001\n\357\004\n\025GroupByReducerDataset\022\021\n\rinput_dataset\030\025\0225\n\030key_func_other_arguments2\031Tkey_func_other_arguments\0227\n\031init_func_other_arguments2\032Tinit_func_other_arguments\022;\n\033reduce_func_other_arguments2\034Treduce_func_other_arguments\022?\n\035finalize_func_other_arguments2\036Tfinalize_func_other_arguments\032\n\n\006handle\030\025\"\020\n\010key_func\022\004func\"\021\n\tinit_func\022\004func\"\023\n\013reduce_func\022\004func\"\025\n\rfinalize_func\022\004func\")\n\031Tkey_func_other_arguments\022\nlist(type)(\001\"*\n\032Tinit_func_other_arguments\022\nlist(type)(\001\",\n\034Treduce_func_other_arguments\022\nlist(type)(\001\".\n\036Tfinalize_func_other_arguments\022\nlist(type)(\001\"\036\n\014output_types\022\nlist(type)(\0010\001\" \n\routput_shapes\022\013list(shape)(\0010\001\210\001\001\n\377\003\n\024GroupByWindowDataset\022\021\n\rinput_dataset\030\025\0225\n\030key_func_other_arguments2\031Tkey_func_other_arguments\022;\n\033reduce_func_other_arguments2\034Treduce_func_other_arguments\022E\n window_size_func_other_arguments2!Twindow_size_func_other_arguments\032\n\n\006handle\030\025\"\020\n\010key_func\022\004func\"\023\n\013reduce_func\022\004func\"\030\n\020window_size_func\022\004func\")\n\031Tkey_func_other_arguments\022\nlist(type)(\001\",\n\034Treduce_func_other_arguments\022\nlist(type)(\001\"1\n!Twindow_size_func_other_arguments\022\nlist(type)(\001\"\036\n\014output_types\022\nlist(type)(\0010\001\" \n\routput_shapes\022\013list(shape)(\0010\001\n\336\001\n\021InterleaveDataset\022\021\n\rinput_dataset\030\025\022\035\n\017other_arguments2\nTarguments\022\020\n\014cycle_length\030\t\022\020\n\014block_length\030\t\032\n\n\006handle\030\025\"\t\n\001f\022\004func\"\032\n\nTarguments\022\nlist(type)(\001\"\036\n\014output_types\022\nlist(type)(\0010\001\" 
\n\routput_shapes\022\013list(shape)(\0010\001\n\207\001\n\010Iterator\032\n\n\006handle\030\024\"\025\n\013shared_name\022\006string\"\023\n\tcontainer\022\006string\"\036\n\014output_types\022\nlist(type)(\0010\001\" \n\routput_shapes\022\013list(shape)(\0010\001\210\001\001\n\213\001\n\030IteratorFromStringHandle\022\021\n\rstring_handle\030\007\032\023\n\017resource_handle\030\024\" \n\014output_types\022\nlist(type)\032\002\n\000(\001\"\"\n\routput_shapes\022\013list(shape)\032\002\n\000(\001\210\001\001\n\215\001\n\032IteratorFromStringHandleV2\022\021\n\rstring_handle\030\007\032\023\n\017resource_handle\030\024\" \n\014output_types\022\nlist(type)\032\002\n\000(\001\"\"\n\routput_shapes\022\013list(shape)\032\002\n\000(\001\210\001\001\n\200\001\n\017IteratorGetNext\022\014\n\010iterator\030\024\032\032\n\ncomponents2\014output_types\"\036\n\014output_types\022\nlist(type)(\0010\001\" \n\routput_shapes\022\013list(shape)(\0010\001\210\001\001\n|\n\031IteratorGetNextAsOptional\022\014\n\010iterator\030\024\032\014\n\010optional\030\025\"\036\n\014output_types\022\nlist(type)(\0010\001\" \n\routput_shapes\022\013list(shape)(\0010\001\210\001\001\n\204\001\n\023IteratorGetNextSync\022\014\n\010iterator\030\024\032\032\n\ncomponents2\014output_types\"\036\n\014output_types\022\nlist(type)(\0010\001\" \n\routput_shapes\022\013list(shape)(\0010\001\210\001\001\nC\n\026IteratorToStringHandle\022\023\n\017resource_handle\030\024\032\021\n\rstring_handle\030\007\210\001\001\n\211\001\n\nIteratorV2\032\n\n\006handle\030\024\"\025\n\013shared_name\022\006string\"\023\n\tcontainer\022\006string\"\036\n\014output_types\022\nlist(type)(\0010\001\" \n\routput_shapes\022\013list(shape)(\0010\001\210\001\001\n\177\n\023LatencyStatsDataset\022\021\n\rinput_dataset\030\025\022\007\n\003tag\030\007\032\n\n\006handle\030\025\"\036\n\014output_types\022\nlist(type)(\0010\001\" 
\n\routput_shapes\022\013list(shape)(\0010\001\n,\n\014MakeIterator\022\013\n\007dataset\030\025\022\014\n\010iterator\030\024\210\001\001\n\371\001\n\022MapAndBatchDataset\022\021\n\rinput_dataset\030\025\022\035\n\017other_arguments2\nTarguments\022\016\n\nbatch_size\030\t\022\030\n\024num_parallel_batches\030\t\022\022\n\016drop_remainder\030\n\032\n\n\006handle\030\025\"\t\n\001f\022\004func\"\032\n\nTarguments\022\nlist(type)(\001\"\036\n\014output_types\022\nlist(type)(\0010\001\" \n\routput_shapes\022\013list(shape)(\0010\001\n\371\001\n\024MapAndBatchDatasetV2\022\021\n\rinput_dataset\030\025\022\035\n\017other_arguments2\nTarguments\022\016\n\nbatch_size\030\t\022\026\n\022num_parallel_calls\030\t\022\022\n\016drop_remainder\030\n\032\n\n\006handle\030\025\"\t\n\001f\022\004func\"\032\n\nTarguments\022\nlist(type)(\001\"\036\n\014output_types\022\nlist(type)(\0010\001\" \n\routput_shapes\022\013list(shape)(\0010\001\n\331\001\n\nMapDataset\022\021\n\rinput_dataset\030\025\022\035\n\017other_arguments2\nTarguments\032\n\n\006handle\030\025\"\t\n\001f\022\004func\"\032\n\nTarguments\022\nlist(type)(\001\"\036\n\014output_types\022\nlist(type)(\0010\001\" \n\routput_shapes\022\013list(shape)(\0010\001\"$\n\030use_inter_op_parallelism\022\004bool\032\002(\001\n\246\001\n\010MapDefun\022\027\n\targuments2\nTarguments\032\026\n\006output2\014output_types\"\034\n\nTarguments\022\nlist(type)(\0010\001\"\036\n\014output_types\022\nlist(type)(\0010\001\" \n\routput_shapes\022\013list(shape)(\0010\001\"\t\n\001f\022\004func\no\n\014ModelDataset\022\021\n\rinput_dataset\030\025\032\n\n\006handle\030\025\"\036\n\014output_types\022\nlist(type)(\0010\001\" \n\routput_shapes\022\013list(shape)(\0010\001\n\257\001\n\023MultiDeviceIterator\032\n\n\006handle\030\024\"\033\n\007devices\022\014list(string)(\0010\001\"\025\n\013shared_name\022\006string\"\023\n\tcontainer\022\006string\"\036\n\014output_types\022\nlist(type)(\0010\001\" 
\n\routput_shapes\022\013list(shape)(\0010\001\210\001\001\n\234\001\n#MultiDeviceIteratorFromStringHandle\022\021\n\rstring_handle\030\007\032\031\n\025multi_device_iterator\030\024\" \n\014output_types\022\nlist(type)\032\002\n\000(\001\"\"\n\routput_shapes\022\013list(shape)\032\002\n\000(\001\210\001\001\n\304\001\n#MultiDeviceIteratorGetNextFromShard\022\031\n\025multi_device_iterator\030\024\022\r\n\tshard_num\030\003\022\022\n\016incarnation_id\030\t\032\032\n\ncomponents2\014output_types\"\036\n\014output_types\022\nlist(type)(\0010\001\" \n\routput_shapes\022\013list(shape)(\0010\001\210\001\001\nm\n\027MultiDeviceIteratorInit\022\013\n\007dataset\030\025\022\031\n\025multi_device_iterator\030\024\022\023\n\017max_buffer_size\030\t\032\022\n\016incarnation_id\030\t\210\001\001\nT\n!MultiDeviceIteratorToStringHandle\022\031\n\025multi_device_iterator\030\024\032\021\n\rstring_handle\030\007\210\001\001\n\257\001\n\017OneShotIterator\032\n\n\006handle\030\024\"\027\n\017dataset_factory\022\004func\"\036\n\014output_types\022\nlist(type)(\0010\001\" \n\routput_shapes\022\013list(shape)(\0010\001\"\027\n\tcontainer\022\006string\032\002\022\000\"\031\n\013shared_name\022\006string\032\002\022\000\210\001\001\n\205\001\n\017OptimizeDataset\022\021\n\rinput_dataset\030\025\022\021\n\roptimizations\030\007\032\n\n\006handle\030\025\"\036\n\014output_types\022\nlist(type)(\0010\001\" \n\routput_shapes\022\013list(shape)(\0010\001\n_\n\021OptionalFromValue\022\033\n\ncomponents2\rToutput_types\032\014\n\010optional\030\025\"\037\n\rToutput_types\022\nlist(type)(\0010\001\n~\n\020OptionalGetValue\022\014\n\010optional\030\025\032\032\n\ncomponents2\014output_types\"\036\n\014output_types\022\nlist(type)(\0010\001\" 
\n\routput_shapes\022\013list(shape)(\0010\001\n/\n\020OptionalHasValue\022\014\n\010optional\030\025\032\r\n\thas_value\030\n\n\034\n\014OptionalNone\032\014\n\010optional\030\025\n\313\001\n\022PaddedBatchDataset\022\021\n\rinput_dataset\030\025\022\016\n\nbatch_size\030\t\022\024\n\rpadded_shapes\030\t*\001N\022\037\n\016padding_values2\rToutput_types\032\n\n\006handle\030\025\"\037\n\rToutput_types\022\nlist(type)(\0010\001\" \n\routput_shapes\022\013list(shape)(\0010\001\"\014\n\001N\022\003int(\0010\001\n\341\001\n\024PaddedBatchDatasetV2\022\021\n\rinput_dataset\030\025\022\016\n\nbatch_size\030\t\022\024\n\rpadded_shapes\030\t*\001N\022\037\n\016padding_values2\rToutput_types\022\022\n\016drop_remainder\030\n\032\n\n\006handle\030\025\"\037\n\rToutput_types\022\nlist(type)(\0010\001\" \n\routput_shapes\022\013list(shape)(\0010\001\"\014\n\001N\022\003int(\0010\001\n\253\002\n\031ParallelInterleaveDataset\022\021\n\rinput_dataset\030\025\022\035\n\017other_arguments2\nTarguments\022\020\n\014cycle_length\030\t\022\020\n\014block_length\030\t\022\n\n\006sloppy\030\n\022\032\n\026buffer_output_elements\030\t\022\033\n\027prefetch_input_elements\030\t\032\n\n\006handle\030\025\"\t\n\001f\022\004func\"\032\n\nTarguments\022\nlist(type)(\001\"\036\n\014output_types\022\nlist(type)(\0010\001\" \n\routput_shapes\022\013list(shape)(\0010\001\n\200\002\n\033ParallelInterleaveDatasetV2\022\021\n\rinput_dataset\030\025\022\035\n\017other_arguments2\nTarguments\022\020\n\014cycle_length\030\t\022\020\n\014block_length\030\t\022\026\n\022num_parallel_calls\030\t\032\n\n\006handle\030\025\"\t\n\001f\022\004func\"\032\n\nTarguments\022\nlist(type)(\001\"\036\n\014output_types\022\nlist(type)(\0010\001\" 
\n\routput_shapes\022\013list(shape)(\0010\001\n\371\001\n\022ParallelMapDataset\022\021\n\rinput_dataset\030\025\022\035\n\017other_arguments2\nTarguments\022\026\n\022num_parallel_calls\030\003\032\n\n\006handle\030\025\"\t\n\001f\022\004func\"\032\n\nTarguments\022\nlist(type)(\001\"\036\n\014output_types\022\nlist(type)(\0010\001\" \n\routput_shapes\022\013list(shape)(\0010\001\"$\n\030use_inter_op_parallelism\022\004bool\032\002(\001\n\314\002\n\023ParseExampleDataset\022\021\n\rinput_dataset\030\025\022\026\n\022num_parallel_calls\030\t\022\030\n\016dense_defaults2\006Tdense\032\n\n\006handle\030\025\"\035\n\013sparse_keys\022\014list(string)(\001\"\034\n\ndense_keys\022\014list(string)(\001\"%\n\014sparse_types\022\nlist(type)(\001:\007\n\0052\003\001\t\007\"\037\n\006Tdense\022\nlist(type)(\001:\007\n\0052\003\001\t\007\"\035\n\014dense_shapes\022\013list(shape)(\001\"\036\n\014output_types\022\nlist(type)(\0010\001\" \n\routput_shapes\022\013list(shape)(\0010\001\n\203\001\n\017PrefetchDataset\022\021\n\rinput_dataset\030\025\022\017\n\013buffer_size\030\t\032\n\n\006handle\030\025\"\036\n\014output_types\022\nlist(type)(\0010\001\" \n\routput_shapes\022\013list(shape)(\0010\001\n\336\001\n%PrependFromQueueAndPaddedBatchDataset\022\021\n\rinput_dataset\030\025\022\016\n\nbatch_size\030\t\022\024\n\rpadded_shapes\030\t*\001N\022\037\n\016padding_values2\rToutput_types\032\n\n\006handle\030\025\"\037\n\rToutput_types\022\nlist(type)(\0010\001\" \n\routput_shapes\022\013list(shape)(\0010\001\"\014\n\001N\022\003int(\0010\001\nu\n\rRandomDataset\022\010\n\004seed\030\t\022\t\n\005seed2\030\t\032\n\n\006handle\030\025\"\036\n\014output_types\022\nlist(type)(\0010\001\" \n\routput_shapes\022\013list(shape)(\0010\001\210\001\001\n~\n\014RangeDataset\022\t\n\005start\030\t\022\010\n\004stop\030\t\022\010\n\004step\030\t\032\n\n\006handle\030\025\"\036\n\014output_types\022\nlist(type)(\0010\001\" 
\n\routput_shapes\022\013list(shape)(\0010\001\210\001\001\n\237\002\n\rReduceDataset\022\021\n\rinput_dataset\030\025\022\027\n\rinitial_state2\006Tstate\022\035\n\017other_arguments2\nTarguments\032\032\n\ncomponents2\014output_types\"\t\n\001f\022\004func\"\030\n\006Tstate\022\nlist(type)(\0010\001\"\032\n\nTarguments\022\nlist(type)(\001\"\036\n\014output_types\022\nlist(type)(\0010\001\" \n\routput_shapes\022\013list(shape)(\0010\001\"$\n\030use_inter_op_parallelism\022\004bool\032\002(\001\n{\n\rRepeatDataset\022\021\n\rinput_dataset\030\025\022\t\n\005count\030\t\032\n\n\006handle\030\025\"\036\n\014output_types\022\nlist(type)(\0010\001\" \n\routput_shapes\022\013list(shape)(\0010\001\n\347\001\n\013ScanDataset\022\021\n\rinput_dataset\030\025\022\027\n\rinitial_state2\006Tstate\022\035\n\017other_arguments2\nTarguments\032\n\n\006handle\030\025\"\t\n\001f\022\004func\"\030\n\006Tstate\022\nlist(type)(\0010\001\"\032\n\nTarguments\022\nlist(type)(\001\"\036\n\014output_types\022\nlist(type)(\0010\001\" \n\routput_shapes\022\013list(shape)(\0010\001\n;\n\021SerializeIterator\022\023\n\017resource_handle\030\024\032\016\n\nserialized\030\025\210\001\001\n\225\001\n\031SetStatsAggregatorDataset\022\021\n\rinput_dataset\030\025\022\024\n\020stats_aggregator\030\024\032\n\n\006handle\030\025\"\036\n\014output_types\022\nlist(type)(\0010\001\" \n\routput_shapes\022\013list(shape)(\0010\001\210\001\001\n\253\001\n\027ShuffleAndRepeatDataset\022\021\n\rinput_dataset\030\025\022\017\n\013buffer_size\030\t\022\010\n\004seed\030\t\022\t\n\005seed2\030\t\022\t\n\005count\030\t\032\n\n\006handle\030\025\"\036\n\014output_types\022\nlist(type)(\0010\001\" 
\n\routput_shapes\022\013list(shape)(\0010\001\n\275\001\n\016ShuffleDataset\022\021\n\rinput_dataset\030\025\022\017\n\013buffer_size\030\t\022\010\n\004seed\030\t\022\t\n\005seed2\030\t\032\n\n\006handle\030\025\"$\n\030reshuffle_each_iteration\022\004bool\032\002(\001\"\036\n\014output_types\022\nlist(type)(\0010\001\" \n\routput_shapes\022\013list(shape)(\0010\001\n,\n\013SinkDataset\022\021\n\rinput_dataset\030\025\032\n\n\006handle\030\025\ny\n\013SkipDataset\022\021\n\rinput_dataset\030\025\022\t\n\005count\030\t\032\n\n\006handle\030\025\"\036\n\014output_types\022\nlist(type)(\0010\001\" \n\routput_shapes\022\013list(shape)(\0010\001\n\245\001\n\014SlideDataset\022\021\n\rinput_dataset\030\025\022\017\n\013window_size\030\t\022\020\n\014window_shift\030\t\022\021\n\rwindow_stride\030\t\032\n\n\006handle\030\025\"\036\n\014output_types\022\nlist(type)(\0010\001\" \n\routput_shapes\022\013list(shape)(\0010\001\nk\n\030SparseTensorSliceDataset\022\013\n\007indices\030\t\022\021\n\006values\"\007Tvalues\022\017\n\013dense_shape\030\t\032\n\n\006handle\030\025\"\017\n\007Tvalues\022\004type\210\001\001\n\217\001\n\nSqlDataset\022\017\n\013driver_name\030\007\022\024\n\020data_source_name\030\007\022\t\n\005query\030\007\032\n\n\006handle\030\025\"\036\n\014output_types\022\nlist(type)(\0010\001\" \n\routput_shapes\022\013list(shape)(\0010\001\210\001\001\nZ\n\025StatsAggregatorHandle\032\n\n\006handle\030\024\"\027\n\tcontainer\022\006string\032\002\022\000\"\031\n\013shared_name\022\006string\032\002\022\000\210\001\001\n6\n\026StatsAggregatorSummary\022\014\n\010iterator\030\024\032\013\n\007summary\030\007\210\001\001\nV\n\017TFRecordDataset\022\r\n\tfilenames\030\007\022\024\n\020compression_type\030\007\022\017\n\013buffer_size\030\t\032\n\n\006handle\030\025\210\001\001\ny\n\013TakeDataset\022\021\n\rinput_dataset\030\025\022\t\n\005count\030\t\032\n\n\006handle\030\025\"\036\n\014output_types\022\nlist(type)(\0010\001\" 
\n\routput_shapes\022\013list(shape)(\0010\001\n~\n\rTensorDataset\022\033\n\ncomponents2\rToutput_types\032\n\n\006handle\030\025\"\037\n\rToutput_types\022\nlist(type)(\0010\001\" \n\routput_shapes\022\013list(shape)(\0010\001\210\001\001\n\203\001\n\022TensorSliceDataset\022\033\n\ncomponents2\rToutput_types\032\n\n\006handle\030\025\"\037\n\rToutput_types\022\nlist(type)(\0010\001\" \n\routput_shapes\022\013list(shape)(\0010\001\210\001\001\nV\n\017TextLineDataset\022\r\n\tfilenames\030\007\022\024\n\020compression_type\030\007\022\017\n\013buffer_size\030\t\032\n\n\006handle\030\025\210\001\001\nq\n\016UnbatchDataset\022\021\n\rinput_dataset\030\025\032\n\n\006handle\030\025\"\036\n\014output_types\022\nlist(type)(\0010\001\" \n\routput_shapes\022\013list(shape)(\0010\001\n\245\001\n\rWindowDataset\022\021\n\rinput_dataset\030\025\022\010\n\004size\030\t\022\t\n\005shift\030\t\022\n\n\006stride\030\t\022\022\n\016drop_remainder\030\n\032\n\n\006handle\030\025\"\036\n\014output_types\022\nlist(type)(\0010\001\" \n\routput_shapes\022\013list(shape)(\0010\001\n\177\n\nZipDataset\022\025\n\016input_datasets\030\025*\001N\032\n\n\006handle\030\025\"\036\n\014output_types\022\nlist(type)(\0010\001\" \n\routput_shapes\022\013list(shape)(\0010\001\"\014\n\001N\022\003int(\0010\001")